Why is this problem important to solve?
What is the intended goal?
What are the key questions that need to be answered?
What is it that we are trying to solve using data science?
# Mounting the Google Drive that holds the zipped dataset (Colab-only step).
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
#Importing libraries required to load the data
import zipfile
import os
from PIL import Image
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import cv2
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, BatchNormalization, Dropout, Flatten, LeakyReLU, GlobalAvgPool2D
from tensorflow.keras.utils import to_categorical
from tensorflow.keras import optimizers
from tensorflow.keras import backend
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
import random
from random import shuffle
from abc import ABC, abstractmethod
from tensorflow.keras.applications.vgg16 import VGG16
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras import Model
#to ignore warnings
import warnings
warnings.filterwarnings('ignore')
import logging
# Remove the limit from the number of displayed columns and rows. It helps to see the entire dataframe while printing it
pd.set_option("display.max_columns", None)
pd.set_option("display.max_rows", 200)
class Data():
    """Load the zipped malaria cell images and build every dataset variant
    used by the models below: normalized RGB arrays, HSV conversions,
    one-hot labels, augmented Keras generators, and upscaled (75x75)
    copies for architectures that need bigger inputs.
    """
    # Every image is resized to SIZE x SIZE pixels when read from disk.
    SIZE = 64
    def __init__(self, zip_path, train_dir, test_dir, classifiers):
        # `classifiers` is the ordered list of class folder names; each
        # name's index becomes its integer label.
        self.classifiers = classifiers
        self._unzip(zip_path)
        print("\n\nReading RGB training data")
        self.train_images, self.train_labels = self._read_data(dir_to_read_from = train_dir, data_title = 'train data')
        print("\nTask Complete")
        print("\n\nReading RGB test data")
        self.test_images, self.test_labels = self._read_data(dir_to_read_from = test_dir, data_title = 'test data')
        print("\nTask Complete")
        print("\n\nNormalizing RGB training data")
        self.train_images = self._normalize_images(images_list = self.train_images)
        print("\nTask Complete")
        print("\n\nNormalizing RGB test data")
        self.test_images = self._normalize_images(images_list = self.test_images)
        print("\nTask Complete")
        # NOTE(review): HSV conversion runs on the already-normalized float32
        # arrays; cv2.cvtColor on floats assumes values in [0, 1] and yields
        # Hue in [0, 360] — confirm the downstream models expect that range.
        print("\n\nGenerating HSV training data")
        self.train_images_hsv = self._convert_to_hsv(images_list = self.train_images)
        print("\nTask Complete")
        print("\n\nGenerating HSV test data")
        self.test_images_hsv = self._convert_to_hsv(images_list = self.test_images)
        print("\nTask Complete")
        print("\n\nGenerating 1 hot encoing for train labels")
        self.train_labels_1hotencoded = self._one_hot_encoding(labels_list = self.train_labels, classifiers = self.classifiers)
        print("\nTask Complete")
        print("\n\nGenerating 1 hot encoing for test labels")
        self.test_labels_1hotencoded = self._one_hot_encoding(labels_list = self.test_labels, classifiers = self.classifiers)
        print("\nTask Complete")
        print("\n\nGenerating augmented data for RGB training set")
        self.train_generator, self.val_generator = self._augment_data(self.train_images, self.train_labels_1hotencoded)
        print("\nTask Complete")
        print("\n\nGenerating augmented data for HSV training set")
        self.train_generator_hsv, self.val_generator_hsv = self._augment_data(self.train_images_hsv, self.train_labels_1hotencoded)
        print("\nTask Complete")
        print("\n\nGenerating bigger images for algorithm which needs bigger sized images for RGB training set")
        self.train_images_increased_size = self._increase_image_size(75, self.train_images)
        print("\nTask Complete")
        print("\n\nGenerating bigger images for algorithm which needs bigger sized images for RGB test set")
        self.test_images_increased_size = self._increase_image_size(75, self.test_images)
        print("\nTask Complete")
        print("\n\nGenerating bigger images for algorithm which needs bigger sized images for HSV training set")
        self.train_images_hsv_increased_size = self._convert_to_hsv(images_list = self.train_images_increased_size)
        print("\nTask Complete")
        print("\n\nGenerating bigger images for algorithm which needs bigger sized images for HSV test set")
        self.test_images_hsv_increased_size = self._convert_to_hsv(images_list = self.test_images_increased_size)
        print("\nTask Complete")
        print("\n\nGenerating augmented data for bigger images for algorithm which needs bigger sized images for RGB training set")
        self.train_generator_increased_size, self.val_generator_increased_size = self._augment_data(self.train_images_increased_size, self.train_labels_1hotencoded)
        print("\nTask Complete")
        print("\n\nGenerating augmented data for bigger images for algorithm which needs bigger sized images for HSV training set")
        self.train_generator_hsv_increased_size, self.val_generator_hsv_increased_size = self._augment_data(self.train_images_hsv_increased_size, self.train_labels_1hotencoded)
        print("\nTask Complete")
    def _unzip(self, zip_path):
        # Extracts into the current working directory (the Colab session disk).
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            zip_ref.extractall()
    def _read_data(self, dir_to_read_from, data_title):
        """Read every image under <dir_to_read_from>/<classifier>/ into one
        4D image array plus a parallel 1D integer-label array, print the
        summary stats, and return (images, labels)."""
        images_list = []
        labels_list = []
        for idx, classifier in enumerate(self.classifiers):
            folder_path = "/" + classifier + "/"
            images_path = os.listdir(dir_to_read_from + folder_path)
            for i, image_name in enumerate(images_path):
                try:
                    image = Image.open(dir_to_read_from + folder_path + image_name)
                    image = image.resize((Data.SIZE, Data.SIZE))
                    images_list.append(np.array(image))
                    labels_list.append(idx)
                except Exception:
                    # Unreadable or non-image files are silently skipped.
                    pass
        np_image_list = np.array(images_list)
        np_label_list = np.array(labels_list)
        self.print_info(np_image_list, np_label_list, data_title)
        return (np_image_list, np_label_list)
    def print_info(self, images, labels, data_title):
        """Print shapes and pixel-value range for a (images, labels) pair."""
        print("\n\nInformation about " + data_title)
        print("\n")
        print(data_title + " images shape: ", images.shape)
        print(data_title + " labels shape: ", labels.shape)
        print("\n")
        print(data_title + " min pixel value:", images.min())
        print(data_title + " max pixel value:", images.max())
    def _normalize_images(self, images_list):
        # Scale uint8 pixel values [0, 255] down to float32 [0, 1].
        images_list = (images_list/255).astype('float32')
        return images_list
    def _augment_data(self, train_images, train_labels):
        """Split into train/val (80/20) and wrap both in Keras generators;
        only the training generator applies random flip/shear/zoom/rotation."""
        # Fixed seeds so the split and augmentation stream are reproducible.
        np.random.seed(42)
        random.seed(42)
        tf.random.set_seed(42)
        X_train, X_val, y_train, y_val = train_test_split(train_images, train_labels, test_size=0.2, random_state=42)
        train_datagen = ImageDataGenerator(
            horizontal_flip=True,
            shear_range=0.2,
            zoom_range = 0.2,
            rotation_range=20)
        # Validation data passes through unmodified.
        val_datagen = ImageDataGenerator()
        train_generator = train_datagen.flow(x = X_train, y = y_train, batch_size=64, seed=42, shuffle=True)
        val_generator = val_datagen.flow(x= X_val, y = y_val, batch_size=64, seed=42, shuffle=True)
        return (train_generator, val_generator)
    def _convert_to_hsv(self, images_list):
        # NOTE(review): images were loaded with PIL (RGB channel order) but are
        # converted with COLOR_BGR2HSV, which assumes BGR order — confirm this
        # channel swap is intentional (COLOR_RGB2HSV may have been meant).
        hsv_image_list = []
        for img in images_list:
            hsv_image_list.append(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))
        return np.array(hsv_image_list)
    def _one_hot_encoding(self, labels_list, classifiers):
        # Integer labels -> one-hot rows of width len(classifiers).
        labels_list=to_categorical(labels_list, len(classifiers))
        return labels_list
    def _increase_image_size(self, new_size, images):
        # Bicubic upscale of every image to new_size x new_size.
        return np.array([cv2.resize(image, (new_size, new_size), interpolation = cv2.INTER_CUBIC) for image in images])
    def visualize_data(self, images, labels, classifiers, rows, cols, title):
        """Show a rows x cols grid of randomly chosen images with their class names."""
        print('Shape of ' + title)
        print(images.shape)
        print('Some images from ' + title)
        np.random.seed(42)
        total = rows*cols
        plt.figure(1 , figsize = (total , total))
        # Temporarily raise the root logger level to silence plotting warnings.
        logger = logging.getLogger()
        old_level = logger.level
        logger.setLevel(100)
        for n in range(1, total+1):
            plt.subplot(rows, cols, n)
            index = int(np.random.randint(0, images.shape[0], 1))
            plt.title(classifiers[labels[index]])
            plt.imshow(images[index])
            plt.axis('off')
        logger.setLevel(old_level)
    def plot_data_to_check_balance(self, labels, classifiers, title):
        """Print and plot per-class label counts to check class balance."""
        print('Checking balance of ' + title)
        df_labels = pd.DataFrame(labels, columns = ['classifiers'])
        print(df_labels.value_counts())
        sns.set_theme(style="darkgrid")
        plt.title("Checking data balance for " + title)
        sns.countplot(x="classifiers", data=df_labels)
        plt.show()
    def print_mean_img(self, classifiers, images, labels, data_title):
        """Display the pixel-wise mean image of each class."""
        print('Mean images for ' + data_title)
        image_data_for_each_classifier = {}
        for idx, classifier in enumerate(classifiers):
            image_data_for_each_classifier[idx] = []
        # Bucket every image under its integer label.
        for img, label in zip(images, labels):
            image_data_for_each_classifier[label].append([img])
        for title_idx, corresponding_images in image_data_for_each_classifier.items():
            full_mat = np.array(corresponding_images)
            title = classifiers[title_idx]
            # Mean over the sample axis; [0] drops the singleton wrapper dim.
            mean_img = np.mean(full_mat, axis = 0)[0]
            plt.imshow(mean_img)
            plt.title(f'Average {title}')
            plt.axis('off')
            plt.show()
    def gaussian_blur(self, images, labels, data_title):
        """Blur the first 100 images with a 5x5 Gaussian kernel and show
        5 randomly picked blurred samples with their labels."""
        print('Gaussian blur images for ' + data_title)
        gbx=[] ## to hold the blurred images
        for i in np.arange(0,100,1):
            b= cv2.GaussianBlur(images[i], (5, 5), 0)
            gbx.append(b)
        gbx=np.array(gbx)
        viewimage=np.random.randint(1,100,5)
        fig,ax=plt.subplots(1,5,figsize=(18,18))
        for t,i in zip(range(5),viewimage):
            Title=labels[i]
            ax[t].set_title(Title)
            ax[t].imshow(gbx[i])
            ax[t].set_axis_off()
        fig.tight_layout()
# Build the full dataset pipeline from the zipped cell images on Drive.
data = Data (
    zip_path = '/content/drive/MyDrive/MIT/Notebooks/Capstone Project/cell_images.zip',
    train_dir = '/content/cell_images/train',
    test_dir = '/content/cell_images/test',
    classifiers = ['uninfected','parasitized'])
Reading RGB training data Information about train data train data images shape: (24958, 64, 64, 3) train data labels shape: (24958,) train data min pixel value: 0 train data max pixel value: 255 Task Complete Reading RGB test data Information about test data test data images shape: (2600, 64, 64, 3) test data labels shape: (2600,) test data min pixel value: 0 test data max pixel value: 255 Task Complete Normalizing RGB training data Task Complete Normalizing RGB test data Task Complete Generating HSV training data Task Complete Generating HSV test data Task Complete Generating 1 hot encoing for train labels Task Complete Generating 1 hot encoing for test labels Task Complete Generating augmented data for RGB training set Task Complete Generating augmented data for HSV training set Task Complete Generating bigger images for algorithm which needs bigger sized images for RGB training set Task Complete Generating bigger images for algorithm which needs bigger sized images for RGB test set Task Complete Generating bigger images for algorithm which needs bigger sized images for HSV training set Task Complete Generating bigger images for algorithm which needs bigger sized images for HSV test set Task Complete Generating augmented data for bigger images for algorithm which needs bigger sized images for RGB training set Task Complete Generating augmented data for bigger images for algorithm which needs bigger sized images for HSV training set Task Complete
There are a total of 24,958 train and 2,600 test images (colored) that we have taken from microscopic images. These images are of the following categories:
Parasitized: The parasitized cells contain the Plasmodium parasite which causes malaria
Uninfected: The uninfected cells are free of the Plasmodium parasites
The files have been extracted to the local session of Google Colab. The extracted folder would have the following structure:
The extracted folder has different folders for train and test data which further contains the different sizes of images for parasitized and uninfected cells within the respective folder name.
The size of all images is scaled to 64x64 and are converted to 4D arrays so that they can be used as an input for the convolutional neural network. Also, we have created the labels for both types of images to be able to train and test the model.
# Show a 4x4 grid of random samples from the RGB training set.
data.visualize_data(images = data.train_images, labels = data.train_labels, classifiers = data.classifiers, rows = 4, cols = 4, title = 'train data')
Shape of train data (24958, 64, 64, 3) Some images from train data
Data is read in numpy arrays train_images and train_labels:
train_images (4D array):
train_labels (1D array):
# Show a 4x4 grid of random samples from the RGB test set.
data.visualize_data(images = data.test_images, labels = data.test_labels, classifiers = data.classifiers, rows = 4, cols = 4, title = 'test data')
Shape of test data (2600, 64, 64, 3) Some images from test data
Data is read in numpy arrays test_images and test_labels:
test_images (4D array):
test_labels (1D array):
# Plot per-class counts of the training labels to verify the set is balanced.
data.plot_data_to_check_balance(labels = data.train_labels, classifiers = data.classifiers, title = 'train data')
Checking balance of train data classifiers 1 12582 0 12376 dtype: int64
Data distribution for train data:
Data is well balanced in training data (helps with unbiased model creation)
# Plot per-class counts of the test labels to verify the set is balanced.
data.plot_data_to_check_balance(labels = data.test_labels, classifiers = data.classifiers, title = 'test data')
Checking balance of test data classifiers 0 1300 1 1300 dtype: int64
Data distribution for test data:
Data is well balanced in test data (helps with unbiased model creation)
Let's have a look at the mean images of both training and test data
# Display the pixel-wise mean image per class for the training data.
data.print_mean_img(classifiers = data.classifiers, images = data.train_images, labels = data.train_labels, data_title = 'train data')
Mean images for train data
# Display the pixel-wise mean image per class for the test data.
data.print_mean_img(classifiers = data.classifiers, images = data.test_images, labels = data.test_labels, data_title = 'test data')
Mean images for test data
# We have already normalized data after reading it in our Data class
# (pixel values are float32 in [0, 1]); re-print the stats to confirm.
data.print_info(data.train_images, data.train_labels, '')
data.print_info(data.test_images, data.test_labels, '')
Information about images shape: (24958, 64, 64, 3) labels shape: (24958,) min pixel value: 0.0 max pixel value: 1.0 Information about images shape: (2600, 64, 64, 3) labels shape: (2600,) min pixel value: 0.0 max pixel value: 1.0
As both train data images and test data images have pixel value ranging from 0 to 255, we have normalized those values by dividing them by 255. So, now all values lie between 0 and 1 and their data type has changed from integer to floating point number.
Old value range:
# Show a 4x4 grid of random samples from the HSV training set.
data.visualize_data(images = data.train_images_hsv, labels = data.train_labels, classifiers = data.classifiers, rows = 4, cols = 4, title = 'train data')
Shape of train data (24958, 64, 64, 3) Some images from train data
# Show a 4x4 grid of random samples from the HSV test set.
data.visualize_data(images = data.test_images_hsv, labels = data.test_labels, classifiers = data.classifiers, rows = 4, cols = 4, title = 'test data')
Shape of test data (2600, 64, 64, 3) Some images from test data
We have four different types of data:
We have created the following class hierarchy to encapsulate our functionality (sub-points are the corresponding child classes under the parent class):
class ModelData(ABC):
    """Abstract holder for the data a model trains on and evaluates against.

    Stores the shared pieces (class names, one-hot test labels, augmentation
    flag); concrete subclasses attach the matching train/test image sets.
    """
    def __init__(self, data, is_data_augmented):
        self.classifiers = data.classifiers
        self.test_labels = data.test_labels_1hotencoded
        # This will be used while deciding whether to use plain data or image generator data while we fit the model
        self.is_data_augmented = is_data_augmented
class RgbDataBase(ModelData):
    """Base for RGB model data: selects the matching RGB test image set.

    Args:
        data: the prepared Data instance holding every dataset variant.
        is_increased_size_required: pick the upscaled 75x75 test set when True.
        is_data_augmented: forwarded to ModelData for the fit() decision.
    """
    def __init__(self, data, is_increased_size_required = False, is_data_augmented = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.test_images = data.test_images
        else:
            self.test_images = data.test_images_increased_size
        # super() instead of an explicit base-class call keeps the MRO intact.
        super().__init__(data, is_data_augmented)
class RgbData(RgbDataBase):
    """Plain (non-augmented) RGB train/test data for a model.

    Args:
        data: the prepared Data instance.
        is_increased_size_required: pick the upscaled 75x75 sets when True.
    """
    def __init__(self, data, is_increased_size_required = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.train_images = data.train_images
        else:
            self.train_images = data.train_images_increased_size
        self.train_labels = data.train_labels_1hotencoded
        # Not augmented, hence is_data_augmented = False.
        super().__init__(data, is_increased_size_required, False)
class RgbAugmentedData(RgbDataBase):
    """Augmented RGB training data, exposed as Keras train/val generators.

    Args:
        data: the prepared Data instance.
        is_increased_size_required: pick the upscaled 75x75 generators when True.
    """
    def __init__(self, data, is_increased_size_required = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.train_generator = data.train_generator
            self.val_generator = data.val_generator
        else:
            self.train_generator = data.train_generator_increased_size
            self.val_generator = data.val_generator_increased_size
        # Augmented, hence is_data_augmented = True.
        super().__init__(data, is_increased_size_required, True)
class HsvDataBase(ModelData):
    """Base for HSV model data: selects the matching HSV test image set.

    Args:
        data: the prepared Data instance holding every dataset variant.
        is_increased_size_required: pick the upscaled 75x75 test set when True.
        is_data_augmented: forwarded to ModelData for the fit() decision.
    """
    def __init__(self, data, is_increased_size_required = False, is_data_augmented = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.test_images = data.test_images_hsv
        else:
            self.test_images = data.test_images_hsv_increased_size
        # super() instead of an explicit base-class call keeps the MRO intact.
        super().__init__(data, is_data_augmented)
class HsvData(HsvDataBase):
    """Plain (non-augmented) HSV train/test data for a model.

    Args:
        data: the prepared Data instance.
        is_increased_size_required: pick the upscaled 75x75 sets when True.
    """
    def __init__(self, data, is_increased_size_required = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.train_images = data.train_images_hsv
        else:
            self.train_images = data.train_images_hsv_increased_size
        self.train_labels = data.train_labels_1hotencoded
        # Not augmented, hence is_data_augmented = False.
        super().__init__(data, is_increased_size_required, False)
class HsvAugmentedData(HsvDataBase):
    """Augmented HSV training data, exposed as Keras train/val generators.

    Args:
        data: the prepared Data instance.
        is_increased_size_required: pick the upscaled 75x75 generators when True.
    """
    def __init__(self, data, is_increased_size_required = False):
        # Idiomatic truthiness test instead of `== False`.
        if not is_increased_size_required:
            self.train_generator = data.train_generator_hsv
            self.val_generator = data.val_generator_hsv
        else:
            self.train_generator = data.train_generator_hsv_increased_size
            self.val_generator = data.val_generator_hsv_increased_size
        # Augmented, hence is_data_augmented = True.
        super().__init__(data, is_increased_size_required, True)
# One wrapper instance per data variant consumed by the models below.
rgbData = RgbData(data)
rgbAugmentedData = RgbAugmentedData(data)
hsvData = HsvData(data)
hsvAugmentedData = HsvAugmentedData(data)
We have five different types of model definitions:
We have created the following class hierarchy to encapsulate our models' functionality (sub-points are the corresponding child classes under the parent class):
The classes are depicted in the following class diagram:
class AbstractModel(ABC):
    """Template-method base class for all models.

    `execute()` runs the full build/train/evaluate/report pipeline in a fixed
    order; concrete subclasses only implement `define_model()`.
    """
    def __init__(self, model, modelData):
        # `model` is the (initially empty) Keras model; `modelData` supplies
        # train/test sets or generators plus the augmentation flag.
        self.model = model
        self.modelData = modelData
    def execute(self):
        # Fixed pipeline, always in this order.
        self.clear_session()
        self.define_model()
        self.summarize_model()
        self.compile_model()
        self.define_callbacks()
        self.fit_model()
        self.calculate_accuracy()
        self.plot_confusion_matrix()
        self.plot_train_val_accuracy()
    def clear_session(self):
        # Reset Keras state and reseed all RNGs for run-to-run reproducibility.
        backend.clear_session()
        np.random.seed(42)
        random.seed(42)
        tf.random.set_seed(42)
    @abstractmethod
    def define_model(self):
        # Subclasses add their layer stack to self.model here.
        pass
    def summarize_model(self):
        # Print the Keras layer/parameter summary.
        self.model.summary()
    def compile_model(self):
        adam = optimizers.Adam(learning_rate=0.001)
        # NOTE(review): the models end in a 2-unit softmax over one-hot labels;
        # 'categorical_crossentropy' would be the conventional loss there —
        # confirm 'binary_crossentropy' (applied element-wise) is intended.
        self.model.compile(loss='binary_crossentropy', optimizer=adam, metrics=['accuracy'])
    def define_callbacks(self):
        # Stop after 2 stagnant epochs on val_loss; checkpoint the best weights.
        self.callbacks = [EarlyStopping(monitor='val_loss', patience=2), ModelCheckpoint('.mdl_wts.hdf5', monitor='val_loss', save_best_only=True)]
    def fit_model(self):
        """Fit on generators when the data is augmented, else on plain arrays
        with an internal 80/20 validation split."""
        if (self.modelData.is_data_augmented):
            # NOTE(review): batch_size is ignored by fit() when a generator is
            # passed — the generator's own batch size (64) applies.
            self.history = self.model.fit(self.modelData.train_generator, validation_data=self.modelData.val_generator, batch_size=32,callbacks=self.callbacks, epochs=20,verbose=1)
        else:
            self.history = self.model.fit(self.modelData.train_images, self.modelData.train_labels, batch_size=32, callbacks=self.callbacks, validation_split=0.2, epochs=20, verbose=1)
    def calculate_accuracy(self):
        """Evaluate on the held-out test set and print the accuracy."""
        print('\n', 'Calculating Accuracy:')
        # evaluate() returns [loss, accuracy] for the compiled metrics.
        self.accuracy = self.model.evaluate(self.modelData.test_images, self.modelData.test_labels, verbose=1)
        print('\n', 'Test Accuracy:', self.accuracy[1])
        print('\n')
    def plot_confusion_matrix(self):
        """Print the classification report and plot the confusion-matrix heatmap."""
        pred = self.model.predict(self.modelData.test_images)
        # Class index = argmax over the softmax / one-hot vectors.
        pred = np.argmax(pred,axis = 1)
        y_true = np.argmax(self.modelData.test_labels,axis = 1)
        print('\n', 'Classification Report')
        #Printing the classification report
        print(classification_report(y_true,pred))
        print('\n\n', 'Confusion Matrix')
        #Plotting the heatmap using confusion matrix
        cm = confusion_matrix(y_true,pred)
        plt.figure(figsize=(8,5))
        sns.heatmap(cm, annot=True, fmt='.0f', xticklabels=self.modelData.classifiers, yticklabels=self.modelData.classifiers)
        plt.ylabel('Actual')
        plt.xlabel('Predicted')
        plt.show()
        print('\n')
    def plot_train_val_accuracy(self):
        """Plot training vs. validation accuracy across the epochs that ran."""
        print('\n\n Train and Validation Accuracy')
        # Number of epochs actually completed (early stopping may cut it short).
        N = len(self.history.history["accuracy"])
        plt.figure(figsize=(7,7))
        plt.plot(np.arange(0, N), self.history.history["accuracy"], label="train_accuracy", ls='--')
        plt.plot(np.arange(0, N), self.history.history["val_accuracy"], label="val_accuracy", ls='--')
        plt.title("Accuracy vs Epoch")
        plt.xlabel("Epochs")
        plt.ylabel("Accuracy")
        plt.legend(loc="upper left")
class BaseModel(AbstractModel):
    """Baseline CNN: three identical Conv/Pool/Dropout stages followed by a
    dense head with a softmax output over the classes."""

    def __init__(self, modelData, name):
        self.model = Sequential()
        self.name = name
        AbstractModel.__init__(self, self.model, modelData)

    def define_model(self):
        """Assemble the layer stack on the empty Sequential model."""
        add = self.model.add
        # First conv stage carries the input shape; the other two repeat it.
        add(Conv2D(filters=32, kernel_size=2, padding="same", activation="relu", input_shape=(64, 64, 3)))
        add(MaxPooling2D(pool_size=2))
        add(Dropout(0.2))
        for _ in range(2):
            add(Conv2D(filters=32, kernel_size=2, padding="same", activation="relu"))
            add(MaxPooling2D(pool_size=2))
            add(Dropout(0.2))
        # Dense head: flatten -> 512-unit relu -> dropout -> softmax classes.
        add(Flatten())
        add(Dense(512, activation="relu"))
        add(Dropout(0.4))
        add(Dense(len(self.modelData.classifiers), activation="softmax"))
# Baseline CNN trained on plain RGB data.
model00 = BaseModel(rgbData, 'Base model | RGB data')
model00.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 30s 47ms/step - loss: 0.4311 - accuracy: 0.7872 - val_loss: 0.3561 - val_accuracy: 0.8856
Epoch 2/20
624/624 [==============================] - 28s 46ms/step - loss: 0.1300 - accuracy: 0.9610 - val_loss: 0.1481 - val_accuracy: 0.9311
Epoch 3/20
624/624 [==============================] - 28s 45ms/step - loss: 0.1013 - accuracy: 0.9689 - val_loss: 0.1106 - val_accuracy: 0.9495
Epoch 4/20
624/624 [==============================] - 28s 45ms/step - loss: 0.0822 - accuracy: 0.9735 - val_loss: 0.1279 - val_accuracy: 0.9433
Epoch 5/20
624/624 [==============================] - 28s 45ms/step - loss: 0.0700 - accuracy: 0.9782 - val_loss: 0.0942 - val_accuracy: 0.9613
Epoch 6/20
624/624 [==============================] - 29s 46ms/step - loss: 0.0691 - accuracy: 0.9783 - val_loss: 0.0956 - val_accuracy: 0.9651
Epoch 7/20
624/624 [==============================] - 28s 45ms/step - loss: 0.0640 - accuracy: 0.9802 - val_loss: 0.0862 - val_accuracy: 0.9653
Epoch 8/20
624/624 [==============================] - 28s 45ms/step - loss: 0.0650 - accuracy: 0.9788 - val_loss: 0.1629 - val_accuracy: 0.9395
Epoch 9/20
624/624 [==============================] - 28s 45ms/step - loss: 0.0640 - accuracy: 0.9788 - val_loss: 0.1104 - val_accuracy: 0.9579
Calculating Accuracy:
82/82 [==============================] - 1s 12ms/step - loss: 0.0672 - accuracy: 0.9812
Test Accuracy: 0.9811538457870483
Classification Report
precision recall f1-score support
0 0.97 0.99 0.98 1300
1 0.99 0.97 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Baseline CNN trained on augmented RGB data.
model01 = BaseModel(rgbAugmentedData, 'Base model | RGB data | Data augmentation')
model01.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 34s 106ms/step - loss: 0.6081 - accuracy: 0.6631 - val_loss: 0.2875 - val_accuracy: 0.9231
Epoch 2/20
312/312 [==============================] - 33s 105ms/step - loss: 0.1746 - accuracy: 0.9377 - val_loss: 0.1252 - val_accuracy: 0.9595
Epoch 3/20
312/312 [==============================] - 33s 105ms/step - loss: 0.1177 - accuracy: 0.9628 - val_loss: 0.0944 - val_accuracy: 0.9694
Epoch 4/20
312/312 [==============================] - 33s 105ms/step - loss: 0.1009 - accuracy: 0.9666 - val_loss: 0.0859 - val_accuracy: 0.9770
Epoch 5/20
312/312 [==============================] - 33s 104ms/step - loss: 0.0909 - accuracy: 0.9709 - val_loss: 0.0841 - val_accuracy: 0.9730
Epoch 6/20
312/312 [==============================] - 33s 105ms/step - loss: 0.0868 - accuracy: 0.9720 - val_loss: 0.0863 - val_accuracy: 0.9671
Epoch 7/20
312/312 [==============================] - 33s 104ms/step - loss: 0.0849 - accuracy: 0.9715 - val_loss: 0.0797 - val_accuracy: 0.9732
Epoch 8/20
312/312 [==============================] - 33s 105ms/step - loss: 0.0820 - accuracy: 0.9734 - val_loss: 0.0720 - val_accuracy: 0.9768
Epoch 9/20
312/312 [==============================] - 33s 105ms/step - loss: 0.0786 - accuracy: 0.9738 - val_loss: 0.0736 - val_accuracy: 0.9746
Epoch 10/20
312/312 [==============================] - 33s 104ms/step - loss: 0.0793 - accuracy: 0.9746 - val_loss: 0.0790 - val_accuracy: 0.9740
Calculating Accuracy:
82/82 [==============================] - 1s 12ms/step - loss: 0.0576 - accuracy: 0.9804
Test Accuracy: 0.9803845882415771
Classification Report
precision recall f1-score support
0 0.97 0.99 0.98 1300
1 0.99 0.97 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Baseline CNN trained on plain HSV data.
model02 = BaseModel(hsvData, 'Base model | HSV data')
model02.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 30s 46ms/step - loss: 1.7677 - accuracy: 0.5931 - val_loss: 0.8032 - val_accuracy: 0.0355
Epoch 2/20
624/624 [==============================] - 29s 46ms/step - loss: 0.6642 - accuracy: 0.6166 - val_loss: 0.9845 - val_accuracy: 0.0120
Epoch 3/20
624/624 [==============================] - 29s 46ms/step - loss: 0.6576 - accuracy: 0.6210 - val_loss: 0.8398 - val_accuracy: 0.0497
Calculating Accuracy:
82/82 [==============================] - 1s 12ms/step - loss: 0.6901 - accuracy: 0.5138
Test Accuracy: 0.5138461589813232
Classification Report
precision recall f1-score support
0 0.51 0.97 0.67 1300
1 0.66 0.06 0.10 1300
accuracy 0.51 2600
macro avg 0.59 0.51 0.39 2600
weighted avg 0.59 0.51 0.39 2600
Confusion Matrix
Train and Validation Accuracy
# Baseline CNN trained on augmented HSV data.
model03 = BaseModel(hsvAugmentedData, 'Base model | HSV data | Data augmentation')
model03.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 34s 107ms/step - loss: 2.6783 - accuracy: 0.5080 - val_loss: 0.6902 - val_accuracy: 0.5315
Epoch 2/20
312/312 [==============================] - 34s 108ms/step - loss: 0.6957 - accuracy: 0.5206 - val_loss: 0.6901 - val_accuracy: 0.5521
Epoch 3/20
312/312 [==============================] - 33s 105ms/step - loss: 0.6897 - accuracy: 0.5321 - val_loss: 0.6834 - val_accuracy: 0.5771
Epoch 4/20
312/312 [==============================] - 33s 105ms/step - loss: 0.6856 - accuracy: 0.5474 - val_loss: 0.6805 - val_accuracy: 0.5715
Epoch 5/20
312/312 [==============================] - 33s 106ms/step - loss: 0.6824 - accuracy: 0.5560 - val_loss: 0.6790 - val_accuracy: 0.5789
Epoch 6/20
312/312 [==============================] - 33s 106ms/step - loss: 0.6815 - accuracy: 0.5559 - val_loss: 0.6734 - val_accuracy: 0.5859
Epoch 7/20
312/312 [==============================] - 33s 106ms/step - loss: 0.6777 - accuracy: 0.5662 - val_loss: 0.6804 - val_accuracy: 0.5579
Epoch 8/20
312/312 [==============================] - 33s 106ms/step - loss: 0.6787 - accuracy: 0.5627 - val_loss: 0.6673 - val_accuracy: 0.5653
Epoch 9/20
312/312 [==============================] - 33s 106ms/step - loss: 0.6752 - accuracy: 0.5706 - val_loss: 0.6914 - val_accuracy: 0.5272
Epoch 10/20
312/312 [==============================] - 33s 107ms/step - loss: 0.6726 - accuracy: 0.5750 - val_loss: 0.7142 - val_accuracy: 0.5194
Calculating Accuracy:
82/82 [==============================] - 1s 12ms/step - loss: 0.6970 - accuracy: 0.5454
Test Accuracy: 0.5453845858573914
Classification Report
precision recall f1-score support
0 0.53 0.80 0.64 1300
1 0.59 0.30 0.39 1300
accuracy 0.55 2600
macro avg 0.56 0.55 0.52 2600
weighted avg 0.56 0.55 0.52 2600
Confusion Matrix
Train and Validation Accuracy
class ModelWithLeakyRelu(AbstractModel):
    """Same topology as the baseline CNN but with LeakyReLU(0.1) activations
    as separate layers instead of built-in ReLU."""

    def __init__(self, modelData, name):
        self.model = Sequential()
        self.name = name
        AbstractModel.__init__(self, self.model, modelData)

    def define_model(self):
        """Assemble the layer stack on the empty Sequential model."""
        # Three conv stages; only the first declares the input shape.
        conv_specs = [
            dict(filters=32, kernel_size=2, padding="same", input_shape=(64, 64, 3)),
            dict(filters=32, kernel_size=2, padding="same"),
            dict(filters=32, kernel_size=2, padding="same"),
        ]
        for spec in conv_specs:
            self.model.add(Conv2D(**spec))
            self.model.add(LeakyReLU(0.1))
            self.model.add(MaxPooling2D(pool_size=2))
            self.model.add(Dropout(0.2))
        # Dense head: flatten -> 512 units + LeakyReLU -> dropout -> softmax.
        self.model.add(Flatten())
        self.model.add(Dense(512))
        self.model.add(LeakyReLU(0.1))
        self.model.add(Dropout(0.4))
        self.model.add(Dense(len(self.modelData.classifiers), activation="softmax"))
# Train and evaluate the plain LeakyReLU CNN on the raw RGB dataset.
model10 = ModelWithLeakyRelu(rgbData, 'Model With Leaky Relu | RGB data')
model10.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
leaky_re_lu_3 (LeakyReLU) (None, 512) 0
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 105s 167ms/step - loss: 0.4456 - accuracy: 0.7859 - val_loss: 0.3965 - val_accuracy: 0.8702
Epoch 2/20
624/624 [==============================] - 104s 166ms/step - loss: 0.1325 - accuracy: 0.9611 - val_loss: 0.3084 - val_accuracy: 0.8994
Epoch 3/20
624/624 [==============================] - 104s 167ms/step - loss: 0.0937 - accuracy: 0.9717 - val_loss: 0.1266 - val_accuracy: 0.9481
Epoch 4/20
624/624 [==============================] - 104s 166ms/step - loss: 0.0809 - accuracy: 0.9758 - val_loss: 0.2013 - val_accuracy: 0.9415
Epoch 5/20
624/624 [==============================] - 104s 166ms/step - loss: 0.0784 - accuracy: 0.9759 - val_loss: 0.4579 - val_accuracy: 0.9054
Calculating Accuracy:
82/82 [==============================] - 4s 48ms/step - loss: 0.2726 - accuracy: 0.9365
Test Accuracy: 0.9365384578704834
Classification Report
precision recall f1-score support
0 0.89 0.99 0.94 1300
1 0.99 0.88 0.93 1300
accuracy 0.94 2600
macro avg 0.94 0.94 0.94 2600
weighted avg 0.94 0.94 0.94 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the plain LeakyReLU CNN on the augmented RGB dataset.
model11 = ModelWithLeakyRelu(rgbAugmentedData, 'Model With Leaky Relu | RGB data | Data augmentation')
model11.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
leaky_re_lu_3 (LeakyReLU) (None, 512) 0
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 117s 371ms/step - loss: 0.5641 - accuracy: 0.7007 - val_loss: 0.2152 - val_accuracy: 0.9273
Epoch 2/20
312/312 [==============================] - 116s 372ms/step - loss: 0.1936 - accuracy: 0.9366 - val_loss: 0.1096 - val_accuracy: 0.9690
Epoch 3/20
312/312 [==============================] - 116s 372ms/step - loss: 0.1281 - accuracy: 0.9594 - val_loss: 0.1137 - val_accuracy: 0.9627
Epoch 4/20
312/312 [==============================] - 116s 372ms/step - loss: 0.0978 - accuracy: 0.9674 - val_loss: 0.1157 - val_accuracy: 0.9633
Calculating Accuracy:
82/82 [==============================] - 4s 47ms/step - loss: 0.1274 - accuracy: 0.9554
Test Accuracy: 0.9553846120834351
Classification Report
precision recall f1-score support
0 0.93 0.99 0.96 1300
1 0.98 0.93 0.95 1300
accuracy 0.96 2600
macro avg 0.96 0.96 0.96 2600
weighted avg 0.96 0.96 0.96 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the plain LeakyReLU CNN on the HSV-converted dataset.
model12 = ModelWithLeakyRelu(hsvData, 'Model With Leaky Relu | HSV data')
model12.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
leaky_re_lu_3 (LeakyReLU) (None, 512) 0
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 34s 54ms/step - loss: 1.9845 - accuracy: 0.5741 - val_loss: 1.2908 - val_accuracy: 0.0625
Epoch 2/20
624/624 [==============================] - 32s 51ms/step - loss: 0.6865 - accuracy: 0.6030 - val_loss: 1.3544 - val_accuracy: 0.0116
Epoch 3/20
624/624 [==============================] - 32s 52ms/step - loss: 0.6662 - accuracy: 0.6098 - val_loss: 0.9851 - val_accuracy: 0.0789
Epoch 4/20
624/624 [==============================] - 32s 51ms/step - loss: 0.6555 - accuracy: 0.6121 - val_loss: 1.2206 - val_accuracy: 0.0044
Epoch 5/20
624/624 [==============================] - 32s 51ms/step - loss: 0.6465 - accuracy: 0.6218 - val_loss: 1.0657 - val_accuracy: 0.0288
Calculating Accuracy:
82/82 [==============================] - 1s 14ms/step - loss: 0.7163 - accuracy: 0.5085
Test Accuracy: 0.5084615349769592
Classification Report
precision recall f1-score support
0 0.50 0.98 0.67 1300
1 0.64 0.04 0.07 1300
accuracy 0.51 2600
macro avg 0.57 0.51 0.37 2600
weighted avg 0.57 0.51 0.37 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the plain LeakyReLU CNN on the augmented HSV dataset.
model13 = ModelWithLeakyRelu(hsvAugmentedData, 'Model With Leaky Relu | HSV data | Data augmentation')
model13.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 416
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
dropout (Dropout) (None, 32, 32, 32) 0
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
dropout_1 (Dropout) (None, 16, 16, 32) 0
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
dropout_2 (Dropout) (None, 8, 8, 32) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 512) 1049088
leaky_re_lu_3 (LeakyReLU) (None, 512) 0
dropout_3 (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 1,058,786
Trainable params: 1,058,786
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 117s 371ms/step - loss: 3.0808 - accuracy: 0.5146 - val_loss: 0.6962 - val_accuracy: 0.5403
Epoch 2/20
312/312 [==============================] - 116s 372ms/step - loss: 0.7531 - accuracy: 0.5204 - val_loss: 0.6905 - val_accuracy: 0.5250
Epoch 3/20
312/312 [==============================] - 116s 370ms/step - loss: 0.7171 - accuracy: 0.5268 - val_loss: 0.6985 - val_accuracy: 0.5136
Epoch 4/20
312/312 [==============================] - 116s 372ms/step - loss: 0.7023 - accuracy: 0.5414 - val_loss: 0.6775 - val_accuracy: 0.5467
Epoch 5/20
312/312 [==============================] - 116s 371ms/step - loss: 0.6949 - accuracy: 0.5475 - val_loss: 0.6703 - val_accuracy: 0.5761
Epoch 6/20
312/312 [==============================] - 116s 371ms/step - loss: 0.6895 - accuracy: 0.5513 - val_loss: 0.7138 - val_accuracy: 0.5198
Epoch 7/20
312/312 [==============================] - 116s 371ms/step - loss: 0.6848 - accuracy: 0.5636 - val_loss: 0.7101 - val_accuracy: 0.5306
Calculating Accuracy:
82/82 [==============================] - 4s 47ms/step - loss: 0.6993 - accuracy: 0.5000
Test Accuracy: 0.5
Classification Report
precision recall f1-score support
0 0.50 1.00 0.67 1300
1 0.00 0.00 0.00 1300
accuracy 0.50 2600
macro avg 0.25 0.50 0.33 2600
weighted avg 0.25 0.50 0.33 2600
Confusion Matrix
Train and Validation Accuracy
class ModelWithLeakyReluBatchNormalization(AbstractModel):
    """Four-stage LeakyReLU CNN with BatchNormalization after the 2nd and 4th stages.

    Expects 64x64x3 input images; output size comes from
    ``self.modelData.classifiers`` (populated by ``AbstractModel.__init__``).
    """

    def __init__(self, modelData, name):
        self.model = Sequential()
        self.name = name
        AbstractModel.__init__(self, self.model, modelData)

    def define_model(self):
        """Assemble the layer stack on ``self.model``."""
        # Convolutional feature extractor: BatchNormalization is inserted
        # after the second and fourth pooling stages.
        feature_layers = [
            Conv2D(32, (3, 3), input_shape=(64, 64, 3), padding='same'),
            LeakyReLU(0.1),
            MaxPooling2D(pool_size=2),
            Conv2D(filters=32, kernel_size=2, padding="same"),
            LeakyReLU(0.1),
            MaxPooling2D(pool_size=2),
            BatchNormalization(),
            Conv2D(filters=32, kernel_size=2, padding="same"),
            LeakyReLU(0.1),
            MaxPooling2D(pool_size=2),
            Conv2D(filters=64, kernel_size=2, padding="same"),
            LeakyReLU(0.1),
            MaxPooling2D(pool_size=2),
            BatchNormalization(),
        ]
        # Classification head: flatten, 512-unit dense, dropout, softmax.
        head_layers = [
            Flatten(),
            Dense(512),
            LeakyReLU(0.1),
            Dropout(0.4),
            Dense(len(self.modelData.classifiers), activation="softmax"),
        ]
        for layer in feature_layers + head_layers:
            self.model.add(layer)
# Train and evaluate the LeakyReLU + BatchNorm CNN on the raw RGB dataset.
model20 = ModelWithLeakyReluBatchNormalization(rgbData, 'Model With Leaky Relu & Batch Normalization | RGB data')
model20.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 896
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
batch_normalization (BatchN (None, 16, 16, 32) 128
ormalization)
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
conv2d_3 (Conv2D) (None, 8, 8, 64) 8256
leaky_re_lu_3 (LeakyReLU) (None, 8, 8, 64) 0
max_pooling2d_3 (MaxPooling (None, 4, 4, 64) 0
2D)
batch_normalization_1 (Batc (None, 4, 4, 64) 256
hNormalization)
flatten (Flatten) (None, 1024) 0
dense (Dense) (None, 512) 524800
leaky_re_lu_4 (LeakyReLU) (None, 512) 0
dropout (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 543,618
Trainable params: 543,426
Non-trainable params: 192
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 38s 60ms/step - loss: 0.1767 - accuracy: 0.9357 - val_loss: 0.2330 - val_accuracy: 0.9048
Epoch 2/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0809 - accuracy: 0.9769 - val_loss: 0.1329 - val_accuracy: 0.9487
Epoch 3/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0713 - accuracy: 0.9791 - val_loss: 0.0684 - val_accuracy: 0.9651
Epoch 4/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0667 - accuracy: 0.9795 - val_loss: 0.1265 - val_accuracy: 0.9525
Epoch 5/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0609 - accuracy: 0.9813 - val_loss: 0.1011 - val_accuracy: 0.9603
Calculating Accuracy:
82/82 [==============================] - 2s 17ms/step - loss: 0.0580 - accuracy: 0.9823
Test Accuracy: 0.9823076725006104
Classification Report
precision recall f1-score support
0 0.97 0.99 0.98 1300
1 0.99 0.97 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the LeakyReLU + BatchNorm CNN on the augmented RGB dataset.
model21 = ModelWithLeakyReluBatchNormalization(rgbAugmentedData, 'Model With Leaky Relu & Batch Normalization | RGB data | Data augmentation')
model21.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 896
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
batch_normalization (BatchN (None, 16, 16, 32) 128
ormalization)
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
conv2d_3 (Conv2D) (None, 8, 8, 64) 8256
leaky_re_lu_3 (LeakyReLU) (None, 8, 8, 64) 0
max_pooling2d_3 (MaxPooling (None, 4, 4, 64) 0
2D)
batch_normalization_1 (Batc (None, 4, 4, 64) 256
hNormalization)
flatten (Flatten) (None, 1024) 0
dense (Dense) (None, 512) 524800
leaky_re_lu_4 (LeakyReLU) (None, 512) 0
dropout (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 543,618
Trainable params: 543,426
Non-trainable params: 192
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 42s 132ms/step - loss: 0.2381 - accuracy: 0.9040 - val_loss: 0.9237 - val_accuracy: 0.6737
Epoch 2/20
312/312 [==============================] - 38s 120ms/step - loss: 0.0947 - accuracy: 0.9697 - val_loss: 0.0917 - val_accuracy: 0.9732
Epoch 3/20
312/312 [==============================] - 37s 118ms/step - loss: 0.0812 - accuracy: 0.9743 - val_loss: 0.0816 - val_accuracy: 0.9802
Epoch 4/20
312/312 [==============================] - 37s 117ms/step - loss: 0.0807 - accuracy: 0.9747 - val_loss: 0.1002 - val_accuracy: 0.9756
Epoch 5/20
312/312 [==============================] - 37s 118ms/step - loss: 0.0746 - accuracy: 0.9760 - val_loss: 0.0801 - val_accuracy: 0.9768
Epoch 6/20
312/312 [==============================] - 37s 118ms/step - loss: 0.0758 - accuracy: 0.9749 - val_loss: 0.0790 - val_accuracy: 0.9764
Epoch 7/20
312/312 [==============================] - 37s 118ms/step - loss: 0.0726 - accuracy: 0.9769 - val_loss: 0.0873 - val_accuracy: 0.9772
Epoch 8/20
312/312 [==============================] - 37s 118ms/step - loss: 0.0703 - accuracy: 0.9772 - val_loss: 0.1089 - val_accuracy: 0.9760
Calculating Accuracy:
82/82 [==============================] - 2s 17ms/step - loss: 0.0992 - accuracy: 0.9769
Test Accuracy: 0.9769230484962463
Classification Report
precision recall f1-score support
0 1.00 0.96 0.98 1300
1 0.96 1.00 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the LeakyReLU + BatchNorm CNN on the HSV-converted dataset.
model22 = ModelWithLeakyReluBatchNormalization(hsvData, 'Model With Leaky Relu & Batch Normalization | HSV data')
model22.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 896
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
batch_normalization (BatchN (None, 16, 16, 32) 128
ormalization)
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
conv2d_3 (Conv2D) (None, 8, 8, 64) 8256
leaky_re_lu_3 (LeakyReLU) (None, 8, 8, 64) 0
max_pooling2d_3 (MaxPooling (None, 4, 4, 64) 0
2D)
batch_normalization_1 (Batc (None, 4, 4, 64) 256
hNormalization)
flatten (Flatten) (None, 1024) 0
dense (Dense) (None, 512) 524800
leaky_re_lu_4 (LeakyReLU) (None, 512) 0
dropout (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 543,618
Trainable params: 543,426
Non-trainable params: 192
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 38s 59ms/step - loss: 0.6270 - accuracy: 0.6635 - val_loss: 7.2006 - val_accuracy: 0.0000e+00
Epoch 2/20
624/624 [==============================] - 36s 58ms/step - loss: 0.3084 - accuracy: 0.8668 - val_loss: 2.7912 - val_accuracy: 0.0052
Epoch 3/20
624/624 [==============================] - 36s 58ms/step - loss: 0.1243 - accuracy: 0.9600 - val_loss: 0.2487 - val_accuracy: 0.8962
Epoch 4/20
624/624 [==============================] - 36s 58ms/step - loss: 0.1003 - accuracy: 0.9693 - val_loss: 0.1183 - val_accuracy: 0.9499
Epoch 5/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0876 - accuracy: 0.9735 - val_loss: 0.2384 - val_accuracy: 0.9030
Epoch 6/20
624/624 [==============================] - 36s 58ms/step - loss: 0.0844 - accuracy: 0.9747 - val_loss: 0.4720 - val_accuracy: 0.8528
Calculating Accuracy:
82/82 [==============================] - 2s 17ms/step - loss: 2.7736 - accuracy: 0.5004
Test Accuracy: 0.5003846287727356
Classification Report
precision recall f1-score support
0 0.57 0.00 0.01 1300
1 0.50 1.00 0.67 1300
accuracy 0.50 2600
macro avg 0.54 0.50 0.34 2600
weighted avg 0.54 0.50 0.34 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the LeakyReLU + BatchNorm CNN on the augmented HSV dataset.
model23 = ModelWithLeakyReluBatchNormalization(hsvAugmentedData, 'Model With Leaky Relu & Batch Normalization | HSV data | Data augmentation')
model23.execute()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 64, 64, 32) 896
leaky_re_lu (LeakyReLU) (None, 64, 64, 32) 0
max_pooling2d (MaxPooling2D (None, 32, 32, 32) 0
)
conv2d_1 (Conv2D) (None, 32, 32, 32) 4128
leaky_re_lu_1 (LeakyReLU) (None, 32, 32, 32) 0
max_pooling2d_1 (MaxPooling (None, 16, 16, 32) 0
2D)
batch_normalization (BatchN (None, 16, 16, 32) 128
ormalization)
conv2d_2 (Conv2D) (None, 16, 16, 32) 4128
leaky_re_lu_2 (LeakyReLU) (None, 16, 16, 32) 0
max_pooling2d_2 (MaxPooling (None, 8, 8, 32) 0
2D)
conv2d_3 (Conv2D) (None, 8, 8, 64) 8256
leaky_re_lu_3 (LeakyReLU) (None, 8, 8, 64) 0
max_pooling2d_3 (MaxPooling (None, 4, 4, 64) 0
2D)
batch_normalization_1 (Batc (None, 4, 4, 64) 256
hNormalization)
flatten (Flatten) (None, 1024) 0
dense (Dense) (None, 512) 524800
leaky_re_lu_4 (LeakyReLU) (None, 512) 0
dropout (Dropout) (None, 512) 0
dense_1 (Dense) (None, 2) 1026
=================================================================
Total params: 543,618
Trainable params: 543,426
Non-trainable params: 192
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 118s 376ms/step - loss: 0.6721 - accuracy: 0.6240 - val_loss: 1.1453 - val_accuracy: 0.5158
Epoch 2/20
312/312 [==============================] - 117s 375ms/step - loss: 0.4554 - accuracy: 0.7738 - val_loss: 1.1074 - val_accuracy: 0.5325
Epoch 3/20
312/312 [==============================] - 120s 384ms/step - loss: 0.2584 - accuracy: 0.8874 - val_loss: 1.6730 - val_accuracy: 0.5397
Epoch 4/20
312/312 [==============================] - 117s 375ms/step - loss: 0.1489 - accuracy: 0.9476 - val_loss: 0.5522 - val_accuracy: 0.8031
Epoch 5/20
312/312 [==============================] - 117s 373ms/step - loss: 0.1259 - accuracy: 0.9551 - val_loss: 0.1276 - val_accuracy: 0.9591
Epoch 6/20
312/312 [==============================] - 117s 374ms/step - loss: 0.1107 - accuracy: 0.9635 - val_loss: 0.1762 - val_accuracy: 0.9369
Epoch 7/20
312/312 [==============================] - 117s 374ms/step - loss: 0.1081 - accuracy: 0.9645 - val_loss: 1.1781 - val_accuracy: 0.7354
Calculating Accuracy:
82/82 [==============================] - 4s 52ms/step - loss: 1.3760 - accuracy: 0.5335
Test Accuracy: 0.5334615111351013
Classification Report
precision recall f1-score support
0 0.52 0.90 0.66 1300
1 0.63 0.17 0.26 1300
accuracy 0.53 2600
macro avg 0.57 0.53 0.46 2600
weighted avg 0.57 0.53 0.46 2600
Confusion Matrix
Train and Validation Accuracy
class ModelWithTransferLearningVGG16(AbstractModel):
    """Transfer-learning model: frozen ImageNet-pretrained VGG16 base plus a
    trainable dense classification head.

    Output size comes from ``self.modelData.classifiers`` (populated by
    ``AbstractModel.__init__``).
    """

    def __init__(self, modelData, name):
        # A placeholder Sequential is handed to the base class here;
        # define_model() later rebinds self.model to a functional Model
        # built on top of VGG16.  NOTE(review): this relies on AbstractModel
        # re-reading self.model after define_model() runs — confirm.
        self.model = Sequential()
        self.name = name
        AbstractModel.__init__(self, self.model, modelData)

    def define_model(self):
        # Load the VGG16 convolutional base pre-trained on ImageNet,
        # without its top classifier, sized for our 64x64x3 inputs.
        vgg = VGG16(include_top=False, weights='imagenet', input_shape=(64,64,3))
        vgg.summary()
        transfer_layer = vgg.get_layer('block5_pool')
        # Freeze every VGG16 weight; only the new head below is trained.
        vgg.trainable=False
        x = Flatten()(transfer_layer.output) # Flatten the output of VGG16's final (block5) pooling layer
        x = Dense(256, activation='relu')(x)
        # Similarly add a dense layer with 128 neurons
        x = Dense(128, activation='relu')(x)
        x = Dropout(0.3)(x)
        # Add a dense layer with 64 neurons
        x = Dense(64, activation='relu')(x)
        x = BatchNormalization()(x)
        pred = Dense(len(self.modelData.classifiers), activation='softmax')(x)
        # Replace the placeholder Sequential with the full functional model.
        self.model = Model(vgg.input, pred)
# Train and evaluate the VGG16 transfer-learning model on the raw RGB dataset.
model30 = ModelWithTransferLearningVGG16(rgbData, 'Model With Transfer Learning VGG16 | RGB data')
model30.execute()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5
58892288/58889256 [==============================] - 0s 0us/step
58900480/58889256 [==============================] - 0s 0us/step
Model: "vgg16"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 256) 524544
dense_1 (Dense) (None, 128) 32896
dropout (Dropout) (None, 128) 0
dense_2 (Dense) (None, 64) 8256
batch_normalization (BatchN (None, 64) 256
ormalization)
dense_3 (Dense) (None, 2) 130
=================================================================
Total params: 15,280,770
Trainable params: 565,954
Non-trainable params: 14,714,816
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 1030s 2s/step - loss: 0.2177 - accuracy: 0.9187 - val_loss: 0.1780 - val_accuracy: 0.9193
Epoch 2/20
624/624 [==============================] - 1037s 2s/step - loss: 0.1539 - accuracy: 0.9464 - val_loss: 0.3002 - val_accuracy: 0.8808
Epoch 3/20
624/624 [==============================] - 1031s 2s/step - loss: 0.1409 - accuracy: 0.9503 - val_loss: 0.0717 - val_accuracy: 0.9698
Epoch 4/20
624/624 [==============================] - 1030s 2s/step - loss: 0.1355 - accuracy: 0.9516 - val_loss: 0.1595 - val_accuracy: 0.9267
Epoch 5/20
624/624 [==============================] - 1028s 2s/step - loss: 0.1236 - accuracy: 0.9571 - val_loss: 0.0968 - val_accuracy: 0.9531
Calculating Accuracy:
82/82 [==============================] - 108s 1s/step - loss: 0.1805 - accuracy: 0.9319
Test Accuracy: 0.9319230914115906
Classification Report
precision recall f1-score support
0 0.96 0.90 0.93 1300
1 0.91 0.96 0.93 1300
accuracy 0.93 2600
macro avg 0.93 0.93 0.93 2600
weighted avg 0.93 0.93 0.93 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the VGG16 transfer-learning model on the augmented RGB dataset.
model31 = ModelWithTransferLearningVGG16(rgbAugmentedData, 'Model With Transfer Learning VGG16 | RGB data | Data augmentation')
model31.execute()
Model: "vgg16"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 256) 524544
dense_1 (Dense) (None, 128) 32896
dropout (Dropout) (None, 128) 0
dense_2 (Dense) (None, 64) 8256
batch_normalization (BatchN (None, 64) 256
ormalization)
dense_3 (Dense) (None, 2) 130
=================================================================
Total params: 15,280,770
Trainable params: 565,954
Non-trainable params: 14,714,816
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 1043s 3s/step - loss: 0.2575 - accuracy: 0.8992 - val_loss: 0.3087 - val_accuracy: 0.8960
Epoch 2/20
312/312 [==============================] - 1036s 3s/step - loss: 0.2079 - accuracy: 0.9201 - val_loss: 0.1561 - val_accuracy: 0.9477
Epoch 3/20
312/312 [==============================] - 1034s 3s/step - loss: 0.1932 - accuracy: 0.9292 - val_loss: 0.1745 - val_accuracy: 0.9349
Epoch 4/20
312/312 [==============================] - 1032s 3s/step - loss: 0.1863 - accuracy: 0.9303 - val_loss: 0.1839 - val_accuracy: 0.9335
Calculating Accuracy:
82/82 [==============================] - 108s 1s/step - loss: 0.1439 - accuracy: 0.9446
Test Accuracy: 0.944615364074707
Classification Report
precision recall f1-score support
0 0.92 0.98 0.95 1300
1 0.97 0.91 0.94 1300
accuracy 0.94 2600
macro avg 0.95 0.94 0.94 2600
weighted avg 0.95 0.94 0.94 2600
Confusion Matrix
Train and Validation Accuracy
# Train and evaluate the VGG16 transfer-learning model on the HSV-converted dataset.
model32 = ModelWithTransferLearningVGG16(hsvData, 'Model With Transfer Learning VGG16 | HSV data')
model32.execute()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5
58892288/58889256 [==============================] - 0s 0us/step
58900480/58889256 [==============================] - 0s 0us/step
Model: "vgg16"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 256) 524544
dense_1 (Dense) (None, 128) 32896
dropout (Dropout) (None, 128) 0
dense_2 (Dense) (None, 64) 8256
batch_normalization (BatchN (None, 64) 256
ormalization)
dense_3 (Dense) (None, 2) 130
=================================================================
Total params: 15,280,770
Trainable params: 565,954
Non-trainable params: 14,714,816
_________________________________________________________________
Epoch 1/20
624/624 [==============================] - 272s 435ms/step - loss: 0.5721 - accuracy: 0.7109 - val_loss: 0.5730 - val_accuracy: 0.6769
Epoch 2/20
624/624 [==============================] - 268s 430ms/step - loss: 0.5084 - accuracy: 0.7559 - val_loss: 0.7486 - val_accuracy: 0.6132
Epoch 3/20
624/624 [==============================] - 268s 430ms/step - loss: 0.4831 - accuracy: 0.7724 - val_loss: 0.6976 - val_accuracy: 0.6294
Calculating Accuracy:
82/82 [==============================] - 28s 342ms/step - loss: 0.9426 - accuracy: 0.5000
Test Accuracy: 0.5
Classification Report
precision recall f1-score support
0 0.50 1.00 0.67 1300
1 0.00 0.00 0.00 1300
accuracy 0.50 2600
macro avg 0.25 0.50 0.33 2600
weighted avg 0.25 0.50 0.33 2600
Confusion Matrix
Train and Validation Accuracy
# Same VGG16 transfer-learning setup, but on the augmented HSV dataset.
# NOTE(review): the printed evaluation below shows 0.50 test accuracy with
# recall 1.00/0.00 per class — the model collapsed to predicting one class;
# ImageNet-pretrained features likely transfer poorly to HSV-encoded inputs.
model33 = ModelWithTransferLearningVGG16(hsvAugmentedData, 'Model With Transfer Learning VGG16 | HSV data | Data augmentation')
model33.execute()
Model: "vgg16"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
=================================================================
Total params: 14,714,688
Trainable params: 14,714,688
Non-trainable params: 0
_________________________________________________________________
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 64, 64, 3)] 0
block1_conv1 (Conv2D) (None, 64, 64, 64) 1792
block1_conv2 (Conv2D) (None, 64, 64, 64) 36928
block1_pool (MaxPooling2D) (None, 32, 32, 64) 0
block2_conv1 (Conv2D) (None, 32, 32, 128) 73856
block2_conv2 (Conv2D) (None, 32, 32, 128) 147584
block2_pool (MaxPooling2D) (None, 16, 16, 128) 0
block3_conv1 (Conv2D) (None, 16, 16, 256) 295168
block3_conv2 (Conv2D) (None, 16, 16, 256) 590080
block3_conv3 (Conv2D) (None, 16, 16, 256) 590080
block3_pool (MaxPooling2D) (None, 8, 8, 256) 0
block4_conv1 (Conv2D) (None, 8, 8, 512) 1180160
block4_conv2 (Conv2D) (None, 8, 8, 512) 2359808
block4_conv3 (Conv2D) (None, 8, 8, 512) 2359808
block4_pool (MaxPooling2D) (None, 4, 4, 512) 0
block5_conv1 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv2 (Conv2D) (None, 4, 4, 512) 2359808
block5_conv3 (Conv2D) (None, 4, 4, 512) 2359808
block5_pool (MaxPooling2D) (None, 2, 2, 512) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 256) 524544
dense_1 (Dense) (None, 128) 32896
dropout (Dropout) (None, 128) 0
dense_2 (Dense) (None, 64) 8256
batch_normalization (BatchN (None, 64) 256
ormalization)
dense_3 (Dense) (None, 2) 130
=================================================================
Total params: 15,280,770
Trainable params: 565,954
Non-trainable params: 14,714,816
_________________________________________________________________
Epoch 1/20
312/312 [==============================] - 905s 3s/step - loss: 0.6355 - accuracy: 0.6626 - val_loss: 0.5725 - val_accuracy: 0.7183
Epoch 2/20
312/312 [==============================] - 904s 3s/step - loss: 0.5775 - accuracy: 0.7046 - val_loss: 0.5522 - val_accuracy: 0.7240
Epoch 3/20
312/312 [==============================] - 904s 3s/step - loss: 0.5564 - accuracy: 0.7206 - val_loss: 0.5941 - val_accuracy: 0.7089
Epoch 4/20
312/312 [==============================] - 903s 3s/step - loss: 0.5464 - accuracy: 0.7265 - val_loss: 0.5497 - val_accuracy: 0.7330
Epoch 5/20
312/312 [==============================] - 903s 3s/step - loss: 0.5406 - accuracy: 0.7271 - val_loss: 0.5291 - val_accuracy: 0.7360
Epoch 6/20
312/312 [==============================] - 903s 3s/step - loss: 0.5336 - accuracy: 0.7350 - val_loss: 0.5369 - val_accuracy: 0.7400
Epoch 7/20
312/312 [==============================] - 904s 3s/step - loss: 0.5298 - accuracy: 0.7383 - val_loss: 0.5667 - val_accuracy: 0.7055
Calculating Accuracy:
82/82 [==============================] - 93s 1s/step - loss: 0.9343 - accuracy: 0.5000
Test Accuracy: 0.5
Classification Report
precision recall f1-score support
0 0.50 1.00 0.67 1300
1 0.00 0.00 0.00 1300
accuracy 0.50 2600
macro avg 0.25 0.50 0.33 2600
weighted avg 0.25 0.50 0.33 2600
Confusion Matrix
Train and Validation Accuracy
# Rebuild all four dataset variants with is_increased_size_required=True —
# presumably resizing images up to 75x75 to satisfy InceptionV3's minimum
# input size (its input_shape below is (75, 75, 3)); confirm against the
# RgbData/HsvData implementations.
rgbData_increasedSize = RgbData(data, is_increased_size_required = True)
rgbAugmentedData_increasedSize = RgbAugmentedData(data, is_increased_size_required = True)
hsvData_increasedSize = HsvData(data, is_increased_size_required = True)
hsvAugmentedData_increasedSize = HsvAugmentedData(data, is_increased_size_required = True)
class ModelWithTransferLearningInceptionV3(AbstractModel):
    """Transfer-learning classifier built on an ImageNet-pretrained InceptionV3 base.

    Mirrors ModelWithTransferLearningVGG16: a frozen convolutional base with a
    small trainable dense head (256 -> 128 -> dropout -> 64 -> BN -> softmax).
    """

    def __init__(self, modelData, name):
        # Placeholder model; define_model() replaces it with the functional Model.
        self.model = Sequential()
        self.name = name
        AbstractModel.__init__(self, self.model, modelData)

    def define_model(self):
        """Build the InceptionV3-based model and store it in self.model."""
        # InceptionV3 requires inputs of at least 75x75, hence the
        # increased-size datasets are used with this class.
        inceptionv3 = InceptionV3(include_top=False, weights='imagenet', input_shape=(75, 75, 3))
        inceptionv3.summary()

        # Fix: freeze the pretrained base so only the new head is trained.
        # The original code skipped this, leaving all ~21.8M base parameters
        # trainable (see the printed summary: "Trainable params: 21,768,352"),
        # inconsistent with the VGG16 transfer model whose base is frozen.
        for layer in inceptionv3.layers:
            layer.trainable = False

        # Attach the classification head to the base's final feature map.
        last_layer = inceptionv3.layers[-1]
        x = Flatten()(last_layer.output)
        x = Dense(256, activation='relu')(x)
        # Similarly add a dense layer with 128 neurons
        x = Dense(128, activation='relu')(x)
        x = Dropout(0.2)(x)
        # Add a dense layer with 64 neurons
        x = Dense(64, activation='relu')(x)
        x = BatchNormalization()(x)
        # One softmax unit per class label.
        pred = Dense(len(self.modelData.classifiers), activation='softmax')(x)
        self.model = Model(inceptionv3.input, pred)
# Train and evaluate the InceptionV3 transfer-learning model on the RGB
# dataset resized for InceptionV3's 75x75 minimum input requirement.
model40 = ModelWithTransferLearningInceptionV3(rgbData_increasedSize, 'Model With Transfer Learning Inception V3 | RGB data')
model40.execute()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/inception_v3/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5
87916544/87910968 [==============================] - 0s 0us/step
87924736/87910968 [==============================] - 0s 0us/step
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
flatten (Flatten) (None, 2048) 0 ['mixed10[0][0]']
dense (Dense) (None, 256) 524544 ['flatten[0][0]']
dense_1 (Dense) (None, 128) 32896 ['dense[0][0]']
dropout (Dropout) (None, 128) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 64) 8256 ['dropout[0][0]']
batch_normalization_94 (BatchN (None, 64) 256 ['dense_2[0][0]']
ormalization)
dense_3 (Dense) (None, 2) 130 ['batch_normalization_94[0][0]']
==================================================================================================
Total params: 22,368,866
Trainable params: 22,334,306
Non-trainable params: 34,560
__________________________________________________________________________________________________
Epoch 1/20
624/624 [==============================] - 499s 786ms/step - loss: 0.1333 - accuracy: 0.9604 - val_loss: 0.1255 - val_accuracy: 0.9692
Epoch 2/20
624/624 [==============================] - 488s 783ms/step - loss: 0.0707 - accuracy: 0.9775 - val_loss: 0.0456 - val_accuracy: 0.9786
Epoch 3/20
624/624 [==============================] - 487s 780ms/step - loss: 0.0860 - accuracy: 0.9721 - val_loss: 0.0481 - val_accuracy: 0.9870
Epoch 4/20
624/624 [==============================] - 484s 776ms/step - loss: 0.0857 - accuracy: 0.9736 - val_loss: 0.0863 - val_accuracy: 0.9754
Calculating Accuracy:
82/82 [==============================] - 9s 91ms/step - loss: 0.1322 - accuracy: 0.9835
Test Accuracy: 0.9834615588188171
Classification Report
precision recall f1-score support
0 0.98 0.99 0.98 1300
1 0.99 0.98 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Train/evaluate the InceptionV3 transfer-learning classifier on the RGB
# augmented dataset (larger input size variant). The second argument is the
# human-readable title used in the model's reports/plots.
# NOTE(review): `ModelWithTransferLearningInceptionV3` and
# `rgbAugmentedData_increasedSize` are defined in earlier notebook cells —
# presumably `execute()` builds, fits, and evaluates the model end-to-end
# (the summary/epoch output below supports this); confirm against the class
# definition.
model41 = ModelWithTransferLearningInceptionV3(rgbAugmentedData_increasedSize, 'Model With Transfer Learning Inception V3 | RGB data | Data augmentation')
model41.execute()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
flatten (Flatten) (None, 2048) 0 ['mixed10[0][0]']
dense (Dense) (None, 256) 524544 ['flatten[0][0]']
dense_1 (Dense) (None, 128) 32896 ['dense[0][0]']
dropout (Dropout) (None, 128) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 64) 8256 ['dropout[0][0]']
batch_normalization_94 (BatchN (None, 64) 256 ['dense_2[0][0]']
ormalization)
dense_3 (Dense) (None, 2) 130 ['batch_normalization_94[0][0]']
==================================================================================================
Total params: 22,368,866
Trainable params: 22,334,306
Non-trainable params: 34,560
__________________________________________________________________________________________________
Epoch 1/20
312/312 [==============================] - 468s 1s/step - loss: 0.1642 - accuracy: 0.9444 - val_loss: 0.3685 - val_accuracy: 0.9305
Epoch 2/20
312/312 [==============================] - 459s 1s/step - loss: 0.0892 - accuracy: 0.9732 - val_loss: 0.2722 - val_accuracy: 0.9415
Epoch 3/20
312/312 [==============================] - 457s 1s/step - loss: 0.0907 - accuracy: 0.9725 - val_loss: 0.1341 - val_accuracy: 0.9768
Epoch 4/20
312/312 [==============================] - 456s 1s/step - loss: 0.0720 - accuracy: 0.9759 - val_loss: 0.0688 - val_accuracy: 0.9790
Epoch 5/20
312/312 [==============================] - 455s 1s/step - loss: 0.0629 - accuracy: 0.9785 - val_loss: 0.0665 - val_accuracy: 0.9746
Epoch 6/20
312/312 [==============================] - 455s 1s/step - loss: 0.0579 - accuracy: 0.9797 - val_loss: 0.0615 - val_accuracy: 0.9790
Epoch 7/20
312/312 [==============================] - 455s 1s/step - loss: 0.0577 - accuracy: 0.9785 - val_loss: 0.0728 - val_accuracy: 0.9796
Epoch 8/20
312/312 [==============================] - 454s 1s/step - loss: 0.0575 - accuracy: 0.9800 - val_loss: 0.0972 - val_accuracy: 0.9752
Calculating Accuracy:
82/82 [==============================] - 9s 92ms/step - loss: 0.0985 - accuracy: 0.9765
Test Accuracy: 0.9765384793281555
Classification Report
precision recall f1-score support
0 1.00 0.96 0.98 1300
1 0.96 1.00 0.98 1300
accuracy 0.98 2600
macro avg 0.98 0.98 0.98 2600
weighted avg 0.98 0.98 0.98 2600
Confusion Matrix
Train and Validation Accuracy
# Experiment: transfer learning with InceptionV3 on the HSV-converted dataset
# (hsvData_increasedSize — presumably the 75x75 HSV image split; confirm against
# the Data class defined earlier in this file). execute() appears to build,
# train, and evaluate the model, printing the summary/logs shown above.
model42 = ModelWithTransferLearningInceptionV3(hsvData_increasedSize, 'Model With Transfer Learning Inception V3 | HSV data')
model42.execute()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
flatten (Flatten) (None, 2048) 0 ['mixed10[0][0]']
dense (Dense) (None, 256) 524544 ['flatten[0][0]']
dense_1 (Dense) (None, 128) 32896 ['dense[0][0]']
dropout (Dropout) (None, 128) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 64) 8256 ['dropout[0][0]']
batch_normalization_94 (BatchN (None, 64) 256 ['dense_2[0][0]']
ormalization)
dense_3 (Dense) (None, 2) 130 ['batch_normalization_94[0][0]']
==================================================================================================
Total params: 22,368,866
Trainable params: 22,334,306
Non-trainable params: 34,560
__________________________________________________________________________________________________
Epoch 1/20
624/624 [==============================] - 504s 794ms/step - loss: 0.5011 - accuracy: 0.7481 - val_loss: 3.4299 - val_accuracy: 0.0000e+00
Epoch 2/20
624/624 [==============================] - 496s 796ms/step - loss: 0.5500 - accuracy: 0.7197 - val_loss: 0.3839 - val_accuracy: 0.8329
Epoch 3/20
624/624 [==============================] - 497s 797ms/step - loss: 0.3460 - accuracy: 0.8443 - val_loss: 1.2636 - val_accuracy: 0.6066
Epoch 4/20
624/624 [==============================] - 495s 794ms/step - loss: 0.3091 - accuracy: 0.8683 - val_loss: 1.0633 - val_accuracy: 0.0481
Calculating Accuracy:
82/82 [==============================] - 9s 94ms/step - loss: 0.7079 - accuracy: 0.5000
Test Accuracy: 0.5
Classification Report
precision recall f1-score support
0 0.50 1.00 0.67 1300
1 0.00 0.00 0.00 1300
accuracy 0.50 2600
macro avg 0.25 0.50 0.33 2600
weighted avg 0.25 0.50 0.33 2600
Confusion Matrix
Train and Validation Accuracy
# Experiment 43: InceptionV3 transfer learning on HSV-converted, augmented data
# (dataset object `hsvAugmentedData_increasedSize` and the model class are defined
# earlier in the notebook; `execute()` presumably trains, evaluates, and plots —
# TODO confirm against the class definition).
# NOTE(review): the preceding run of this architecture ended with test accuracy 0.50
# and recall 0.00 for class 1 (all predictions collapse to one class), and
# val_accuracy swung between 0.00 and 0.83 across epochs — consider a lower
# learning rate, freezing the InceptionV3 base, and/or early stopping before
# drawing conclusions from this configuration.
model43 = ModelWithTransferLearningInceptionV3(hsvAugmentedData_increasedSize, 'Model With Transfer Learning Inception V3 | HSV data | Data augmentation')
model43.execute()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 75, 75, 3)] 0 []
conv2d (Conv2D) (None, 37, 37, 32) 864 ['input_1[0][0]']
batch_normalization (BatchNorm (None, 37, 37, 32) 96 ['conv2d[0][0]']
alization)
activation (Activation) (None, 37, 37, 32) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 35, 35, 32) 9216 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 35, 35, 32) 96 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 35, 35, 32) 0 ['batch_normalization_1[0][0]']
conv2d_2 (Conv2D) (None, 35, 35, 64) 18432 ['activation_1[0][0]']
batch_normalization_2 (BatchNo (None, 35, 35, 64) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 35, 35, 64) 0 ['batch_normalization_2[0][0]']
max_pooling2d (MaxPooling2D) (None, 17, 17, 64) 0 ['activation_2[0][0]']
conv2d_3 (Conv2D) (None, 17, 17, 80) 5120 ['max_pooling2d[0][0]']
batch_normalization_3 (BatchNo (None, 17, 17, 80) 240 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 17, 17, 80) 0 ['batch_normalization_3[0][0]']
conv2d_4 (Conv2D) (None, 15, 15, 192) 138240 ['activation_3[0][0]']
batch_normalization_4 (BatchNo (None, 15, 15, 192) 576 ['conv2d_4[0][0]']
rmalization)
activation_4 (Activation) (None, 15, 15, 192) 0 ['batch_normalization_4[0][0]']
max_pooling2d_1 (MaxPooling2D) (None, 7, 7, 192) 0 ['activation_4[0][0]']
conv2d_8 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
batch_normalization_8 (BatchNo (None, 7, 7, 64) 192 ['conv2d_8[0][0]']
rmalization)
activation_8 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_8[0][0]']
conv2d_6 (Conv2D) (None, 7, 7, 48) 9216 ['max_pooling2d_1[0][0]']
conv2d_9 (Conv2D) (None, 7, 7, 96) 55296 ['activation_8[0][0]']
batch_normalization_6 (BatchNo (None, 7, 7, 48) 144 ['conv2d_6[0][0]']
rmalization)
batch_normalization_9 (BatchNo (None, 7, 7, 96) 288 ['conv2d_9[0][0]']
rmalization)
activation_6 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_6[0][0]']
activation_9 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_9[0][0]']
average_pooling2d (AveragePool (None, 7, 7, 192) 0 ['max_pooling2d_1[0][0]']
ing2D)
conv2d_5 (Conv2D) (None, 7, 7, 64) 12288 ['max_pooling2d_1[0][0]']
conv2d_7 (Conv2D) (None, 7, 7, 64) 76800 ['activation_6[0][0]']
conv2d_10 (Conv2D) (None, 7, 7, 96) 82944 ['activation_9[0][0]']
conv2d_11 (Conv2D) (None, 7, 7, 32) 6144 ['average_pooling2d[0][0]']
batch_normalization_5 (BatchNo (None, 7, 7, 64) 192 ['conv2d_5[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 7, 7, 64) 192 ['conv2d_7[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 7, 7, 96) 288 ['conv2d_10[0][0]']
ormalization)
batch_normalization_11 (BatchN (None, 7, 7, 32) 96 ['conv2d_11[0][0]']
ormalization)
activation_5 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_5[0][0]']
activation_7 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_7[0][0]']
activation_10 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_10[0][0]']
activation_11 (Activation) (None, 7, 7, 32) 0 ['batch_normalization_11[0][0]']
mixed0 (Concatenate) (None, 7, 7, 256) 0 ['activation_5[0][0]',
'activation_7[0][0]',
'activation_10[0][0]',
'activation_11[0][0]']
conv2d_15 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
batch_normalization_15 (BatchN (None, 7, 7, 64) 192 ['conv2d_15[0][0]']
ormalization)
activation_15 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_15[0][0]']
conv2d_13 (Conv2D) (None, 7, 7, 48) 12288 ['mixed0[0][0]']
conv2d_16 (Conv2D) (None, 7, 7, 96) 55296 ['activation_15[0][0]']
batch_normalization_13 (BatchN (None, 7, 7, 48) 144 ['conv2d_13[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 7, 7, 96) 288 ['conv2d_16[0][0]']
ormalization)
activation_13 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_13[0][0]']
activation_16 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_16[0][0]']
average_pooling2d_1 (AveragePo (None, 7, 7, 256) 0 ['mixed0[0][0]']
oling2D)
conv2d_12 (Conv2D) (None, 7, 7, 64) 16384 ['mixed0[0][0]']
conv2d_14 (Conv2D) (None, 7, 7, 64) 76800 ['activation_13[0][0]']
conv2d_17 (Conv2D) (None, 7, 7, 96) 82944 ['activation_16[0][0]']
conv2d_18 (Conv2D) (None, 7, 7, 64) 16384 ['average_pooling2d_1[0][0]']
batch_normalization_12 (BatchN (None, 7, 7, 64) 192 ['conv2d_12[0][0]']
ormalization)
batch_normalization_14 (BatchN (None, 7, 7, 64) 192 ['conv2d_14[0][0]']
ormalization)
batch_normalization_17 (BatchN (None, 7, 7, 96) 288 ['conv2d_17[0][0]']
ormalization)
batch_normalization_18 (BatchN (None, 7, 7, 64) 192 ['conv2d_18[0][0]']
ormalization)
activation_12 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_12[0][0]']
activation_14 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_14[0][0]']
activation_17 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_17[0][0]']
activation_18 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_18[0][0]']
mixed1 (Concatenate) (None, 7, 7, 288) 0 ['activation_12[0][0]',
'activation_14[0][0]',
'activation_17[0][0]',
'activation_18[0][0]']
conv2d_22 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
batch_normalization_22 (BatchN (None, 7, 7, 64) 192 ['conv2d_22[0][0]']
ormalization)
activation_22 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_22[0][0]']
conv2d_20 (Conv2D) (None, 7, 7, 48) 13824 ['mixed1[0][0]']
conv2d_23 (Conv2D) (None, 7, 7, 96) 55296 ['activation_22[0][0]']
batch_normalization_20 (BatchN (None, 7, 7, 48) 144 ['conv2d_20[0][0]']
ormalization)
batch_normalization_23 (BatchN (None, 7, 7, 96) 288 ['conv2d_23[0][0]']
ormalization)
activation_20 (Activation) (None, 7, 7, 48) 0 ['batch_normalization_20[0][0]']
activation_23 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_23[0][0]']
average_pooling2d_2 (AveragePo (None, 7, 7, 288) 0 ['mixed1[0][0]']
oling2D)
conv2d_19 (Conv2D) (None, 7, 7, 64) 18432 ['mixed1[0][0]']
conv2d_21 (Conv2D) (None, 7, 7, 64) 76800 ['activation_20[0][0]']
conv2d_24 (Conv2D) (None, 7, 7, 96) 82944 ['activation_23[0][0]']
conv2d_25 (Conv2D) (None, 7, 7, 64) 18432 ['average_pooling2d_2[0][0]']
batch_normalization_19 (BatchN (None, 7, 7, 64) 192 ['conv2d_19[0][0]']
ormalization)
batch_normalization_21 (BatchN (None, 7, 7, 64) 192 ['conv2d_21[0][0]']
ormalization)
batch_normalization_24 (BatchN (None, 7, 7, 96) 288 ['conv2d_24[0][0]']
ormalization)
batch_normalization_25 (BatchN (None, 7, 7, 64) 192 ['conv2d_25[0][0]']
ormalization)
activation_19 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_19[0][0]']
activation_21 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_21[0][0]']
activation_24 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_24[0][0]']
activation_25 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_25[0][0]']
mixed2 (Concatenate) (None, 7, 7, 288) 0 ['activation_19[0][0]',
'activation_21[0][0]',
'activation_24[0][0]',
'activation_25[0][0]']
conv2d_27 (Conv2D) (None, 7, 7, 64) 18432 ['mixed2[0][0]']
batch_normalization_27 (BatchN (None, 7, 7, 64) 192 ['conv2d_27[0][0]']
ormalization)
activation_27 (Activation) (None, 7, 7, 64) 0 ['batch_normalization_27[0][0]']
conv2d_28 (Conv2D) (None, 7, 7, 96) 55296 ['activation_27[0][0]']
batch_normalization_28 (BatchN (None, 7, 7, 96) 288 ['conv2d_28[0][0]']
ormalization)
activation_28 (Activation) (None, 7, 7, 96) 0 ['batch_normalization_28[0][0]']
conv2d_26 (Conv2D) (None, 3, 3, 384) 995328 ['mixed2[0][0]']
conv2d_29 (Conv2D) (None, 3, 3, 96) 82944 ['activation_28[0][0]']
batch_normalization_26 (BatchN (None, 3, 3, 384) 1152 ['conv2d_26[0][0]']
ormalization)
batch_normalization_29 (BatchN (None, 3, 3, 96) 288 ['conv2d_29[0][0]']
ormalization)
activation_26 (Activation) (None, 3, 3, 384) 0 ['batch_normalization_26[0][0]']
activation_29 (Activation) (None, 3, 3, 96) 0 ['batch_normalization_29[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 3, 3, 288) 0 ['mixed2[0][0]']
mixed3 (Concatenate) (None, 3, 3, 768) 0 ['activation_26[0][0]',
'activation_29[0][0]',
'max_pooling2d_2[0][0]']
conv2d_34 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
batch_normalization_34 (BatchN (None, 3, 3, 128) 384 ['conv2d_34[0][0]']
ormalization)
activation_34 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_34[0][0]']
conv2d_35 (Conv2D) (None, 3, 3, 128) 114688 ['activation_34[0][0]']
batch_normalization_35 (BatchN (None, 3, 3, 128) 384 ['conv2d_35[0][0]']
ormalization)
activation_35 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_35[0][0]']
conv2d_31 (Conv2D) (None, 3, 3, 128) 98304 ['mixed3[0][0]']
conv2d_36 (Conv2D) (None, 3, 3, 128) 114688 ['activation_35[0][0]']
batch_normalization_31 (BatchN (None, 3, 3, 128) 384 ['conv2d_31[0][0]']
ormalization)
batch_normalization_36 (BatchN (None, 3, 3, 128) 384 ['conv2d_36[0][0]']
ormalization)
activation_31 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_31[0][0]']
activation_36 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_36[0][0]']
conv2d_32 (Conv2D) (None, 3, 3, 128) 114688 ['activation_31[0][0]']
conv2d_37 (Conv2D) (None, 3, 3, 128) 114688 ['activation_36[0][0]']
batch_normalization_32 (BatchN (None, 3, 3, 128) 384 ['conv2d_32[0][0]']
ormalization)
batch_normalization_37 (BatchN (None, 3, 3, 128) 384 ['conv2d_37[0][0]']
ormalization)
activation_32 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_32[0][0]']
activation_37 (Activation) (None, 3, 3, 128) 0 ['batch_normalization_37[0][0]']
average_pooling2d_3 (AveragePo (None, 3, 3, 768) 0 ['mixed3[0][0]']
oling2D)
conv2d_30 (Conv2D) (None, 3, 3, 192) 147456 ['mixed3[0][0]']
conv2d_33 (Conv2D) (None, 3, 3, 192) 172032 ['activation_32[0][0]']
conv2d_38 (Conv2D) (None, 3, 3, 192) 172032 ['activation_37[0][0]']
conv2d_39 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_3[0][0]']
batch_normalization_30 (BatchN (None, 3, 3, 192) 576 ['conv2d_30[0][0]']
ormalization)
batch_normalization_33 (BatchN (None, 3, 3, 192) 576 ['conv2d_33[0][0]']
ormalization)
batch_normalization_38 (BatchN (None, 3, 3, 192) 576 ['conv2d_38[0][0]']
ormalization)
batch_normalization_39 (BatchN (None, 3, 3, 192) 576 ['conv2d_39[0][0]']
ormalization)
activation_30 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_30[0][0]']
activation_33 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_33[0][0]']
activation_38 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_38[0][0]']
activation_39 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_39[0][0]']
mixed4 (Concatenate) (None, 3, 3, 768) 0 ['activation_30[0][0]',
'activation_33[0][0]',
'activation_38[0][0]',
'activation_39[0][0]']
conv2d_44 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
batch_normalization_44 (BatchN (None, 3, 3, 160) 480 ['conv2d_44[0][0]']
ormalization)
activation_44 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_44[0][0]']
conv2d_45 (Conv2D) (None, 3, 3, 160) 179200 ['activation_44[0][0]']
batch_normalization_45 (BatchN (None, 3, 3, 160) 480 ['conv2d_45[0][0]']
ormalization)
activation_45 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_45[0][0]']
conv2d_41 (Conv2D) (None, 3, 3, 160) 122880 ['mixed4[0][0]']
conv2d_46 (Conv2D) (None, 3, 3, 160) 179200 ['activation_45[0][0]']
batch_normalization_41 (BatchN (None, 3, 3, 160) 480 ['conv2d_41[0][0]']
ormalization)
batch_normalization_46 (BatchN (None, 3, 3, 160) 480 ['conv2d_46[0][0]']
ormalization)
activation_41 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_41[0][0]']
activation_46 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_46[0][0]']
conv2d_42 (Conv2D) (None, 3, 3, 160) 179200 ['activation_41[0][0]']
conv2d_47 (Conv2D) (None, 3, 3, 160) 179200 ['activation_46[0][0]']
batch_normalization_42 (BatchN (None, 3, 3, 160) 480 ['conv2d_42[0][0]']
ormalization)
batch_normalization_47 (BatchN (None, 3, 3, 160) 480 ['conv2d_47[0][0]']
ormalization)
activation_42 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_42[0][0]']
activation_47 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_47[0][0]']
average_pooling2d_4 (AveragePo (None, 3, 3, 768) 0 ['mixed4[0][0]']
oling2D)
conv2d_40 (Conv2D) (None, 3, 3, 192) 147456 ['mixed4[0][0]']
conv2d_43 (Conv2D) (None, 3, 3, 192) 215040 ['activation_42[0][0]']
conv2d_48 (Conv2D) (None, 3, 3, 192) 215040 ['activation_47[0][0]']
conv2d_49 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_4[0][0]']
batch_normalization_40 (BatchN (None, 3, 3, 192) 576 ['conv2d_40[0][0]']
ormalization)
batch_normalization_43 (BatchN (None, 3, 3, 192) 576 ['conv2d_43[0][0]']
ormalization)
batch_normalization_48 (BatchN (None, 3, 3, 192) 576 ['conv2d_48[0][0]']
ormalization)
batch_normalization_49 (BatchN (None, 3, 3, 192) 576 ['conv2d_49[0][0]']
ormalization)
activation_40 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_40[0][0]']
activation_43 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_43[0][0]']
activation_48 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_48[0][0]']
activation_49 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_49[0][0]']
mixed5 (Concatenate) (None, 3, 3, 768) 0 ['activation_40[0][0]',
'activation_43[0][0]',
'activation_48[0][0]',
'activation_49[0][0]']
conv2d_54 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
batch_normalization_54 (BatchN (None, 3, 3, 160) 480 ['conv2d_54[0][0]']
ormalization)
activation_54 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_54[0][0]']
conv2d_55 (Conv2D) (None, 3, 3, 160) 179200 ['activation_54[0][0]']
batch_normalization_55 (BatchN (None, 3, 3, 160) 480 ['conv2d_55[0][0]']
ormalization)
activation_55 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_55[0][0]']
conv2d_51 (Conv2D) (None, 3, 3, 160) 122880 ['mixed5[0][0]']
conv2d_56 (Conv2D) (None, 3, 3, 160) 179200 ['activation_55[0][0]']
batch_normalization_51 (BatchN (None, 3, 3, 160) 480 ['conv2d_51[0][0]']
ormalization)
batch_normalization_56 (BatchN (None, 3, 3, 160) 480 ['conv2d_56[0][0]']
ormalization)
activation_51 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_51[0][0]']
activation_56 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_56[0][0]']
conv2d_52 (Conv2D) (None, 3, 3, 160) 179200 ['activation_51[0][0]']
conv2d_57 (Conv2D) (None, 3, 3, 160) 179200 ['activation_56[0][0]']
batch_normalization_52 (BatchN (None, 3, 3, 160) 480 ['conv2d_52[0][0]']
ormalization)
batch_normalization_57 (BatchN (None, 3, 3, 160) 480 ['conv2d_57[0][0]']
ormalization)
activation_52 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_52[0][0]']
activation_57 (Activation) (None, 3, 3, 160) 0 ['batch_normalization_57[0][0]']
average_pooling2d_5 (AveragePo (None, 3, 3, 768) 0 ['mixed5[0][0]']
oling2D)
conv2d_50 (Conv2D) (None, 3, 3, 192) 147456 ['mixed5[0][0]']
conv2d_53 (Conv2D) (None, 3, 3, 192) 215040 ['activation_52[0][0]']
conv2d_58 (Conv2D) (None, 3, 3, 192) 215040 ['activation_57[0][0]']
conv2d_59 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_5[0][0]']
batch_normalization_50 (BatchN (None, 3, 3, 192) 576 ['conv2d_50[0][0]']
ormalization)
batch_normalization_53 (BatchN (None, 3, 3, 192) 576 ['conv2d_53[0][0]']
ormalization)
batch_normalization_58 (BatchN (None, 3, 3, 192) 576 ['conv2d_58[0][0]']
ormalization)
batch_normalization_59 (BatchN (None, 3, 3, 192) 576 ['conv2d_59[0][0]']
ormalization)
activation_50 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_50[0][0]']
activation_53 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_53[0][0]']
activation_58 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_58[0][0]']
activation_59 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_59[0][0]']
mixed6 (Concatenate) (None, 3, 3, 768) 0 ['activation_50[0][0]',
'activation_53[0][0]',
'activation_58[0][0]',
'activation_59[0][0]']
conv2d_64 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
batch_normalization_64 (BatchN (None, 3, 3, 192) 576 ['conv2d_64[0][0]']
ormalization)
activation_64 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_64[0][0]']
conv2d_65 (Conv2D) (None, 3, 3, 192) 258048 ['activation_64[0][0]']
batch_normalization_65 (BatchN (None, 3, 3, 192) 576 ['conv2d_65[0][0]']
ormalization)
activation_65 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_65[0][0]']
conv2d_61 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_66 (Conv2D) (None, 3, 3, 192) 258048 ['activation_65[0][0]']
batch_normalization_61 (BatchN (None, 3, 3, 192) 576 ['conv2d_61[0][0]']
ormalization)
batch_normalization_66 (BatchN (None, 3, 3, 192) 576 ['conv2d_66[0][0]']
ormalization)
activation_61 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_61[0][0]']
activation_66 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_66[0][0]']
conv2d_62 (Conv2D) (None, 3, 3, 192) 258048 ['activation_61[0][0]']
conv2d_67 (Conv2D) (None, 3, 3, 192) 258048 ['activation_66[0][0]']
batch_normalization_62 (BatchN (None, 3, 3, 192) 576 ['conv2d_62[0][0]']
ormalization)
batch_normalization_67 (BatchN (None, 3, 3, 192) 576 ['conv2d_67[0][0]']
ormalization)
activation_62 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_62[0][0]']
activation_67 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_67[0][0]']
average_pooling2d_6 (AveragePo (None, 3, 3, 768) 0 ['mixed6[0][0]']
oling2D)
conv2d_60 (Conv2D) (None, 3, 3, 192) 147456 ['mixed6[0][0]']
conv2d_63 (Conv2D) (None, 3, 3, 192) 258048 ['activation_62[0][0]']
conv2d_68 (Conv2D) (None, 3, 3, 192) 258048 ['activation_67[0][0]']
conv2d_69 (Conv2D) (None, 3, 3, 192) 147456 ['average_pooling2d_6[0][0]']
batch_normalization_60 (BatchN (None, 3, 3, 192) 576 ['conv2d_60[0][0]']
ormalization)
batch_normalization_63 (BatchN (None, 3, 3, 192) 576 ['conv2d_63[0][0]']
ormalization)
batch_normalization_68 (BatchN (None, 3, 3, 192) 576 ['conv2d_68[0][0]']
ormalization)
batch_normalization_69 (BatchN (None, 3, 3, 192) 576 ['conv2d_69[0][0]']
ormalization)
activation_60 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_60[0][0]']
activation_63 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_63[0][0]']
activation_68 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_68[0][0]']
activation_69 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_69[0][0]']
mixed7 (Concatenate) (None, 3, 3, 768) 0 ['activation_60[0][0]',
'activation_63[0][0]',
'activation_68[0][0]',
'activation_69[0][0]']
conv2d_72 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
batch_normalization_72 (BatchN (None, 3, 3, 192) 576 ['conv2d_72[0][0]']
ormalization)
activation_72 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_72[0][0]']
conv2d_73 (Conv2D) (None, 3, 3, 192) 258048 ['activation_72[0][0]']
batch_normalization_73 (BatchN (None, 3, 3, 192) 576 ['conv2d_73[0][0]']
ormalization)
activation_73 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_73[0][0]']
conv2d_70 (Conv2D) (None, 3, 3, 192) 147456 ['mixed7[0][0]']
conv2d_74 (Conv2D) (None, 3, 3, 192) 258048 ['activation_73[0][0]']
batch_normalization_70 (BatchN (None, 3, 3, 192) 576 ['conv2d_70[0][0]']
ormalization)
batch_normalization_74 (BatchN (None, 3, 3, 192) 576 ['conv2d_74[0][0]']
ormalization)
activation_70 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_70[0][0]']
activation_74 (Activation) (None, 3, 3, 192) 0 ['batch_normalization_74[0][0]']
conv2d_71 (Conv2D) (None, 1, 1, 320) 552960 ['activation_70[0][0]']
conv2d_75 (Conv2D) (None, 1, 1, 192) 331776 ['activation_74[0][0]']
batch_normalization_71 (BatchN (None, 1, 1, 320) 960 ['conv2d_71[0][0]']
ormalization)
batch_normalization_75 (BatchN (None, 1, 1, 192) 576 ['conv2d_75[0][0]']
ormalization)
activation_71 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_71[0][0]']
activation_75 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_75[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 1, 1, 768) 0 ['mixed7[0][0]']
mixed8 (Concatenate) (None, 1, 1, 1280) 0 ['activation_71[0][0]',
'activation_75[0][0]',
'max_pooling2d_3[0][0]']
conv2d_80 (Conv2D) (None, 1, 1, 448) 573440 ['mixed8[0][0]']
batch_normalization_80 (BatchN (None, 1, 1, 448) 1344 ['conv2d_80[0][0]']
ormalization)
activation_80 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_80[0][0]']
conv2d_77 (Conv2D) (None, 1, 1, 384) 491520 ['mixed8[0][0]']
conv2d_81 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_80[0][0]']
batch_normalization_77 (BatchN (None, 1, 1, 384) 1152 ['conv2d_77[0][0]']
ormalization)
batch_normalization_81 (BatchN (None, 1, 1, 384) 1152 ['conv2d_81[0][0]']
ormalization)
activation_77 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_77[0][0]']
activation_81 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_81[0][0]']
conv2d_78 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_79 (Conv2D) (None, 1, 1, 384) 442368 ['activation_77[0][0]']
conv2d_82 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
conv2d_83 (Conv2D) (None, 1, 1, 384) 442368 ['activation_81[0][0]']
average_pooling2d_7 (AveragePo (None, 1, 1, 1280) 0 ['mixed8[0][0]']
oling2D)
conv2d_76 (Conv2D) (None, 1, 1, 320) 409600 ['mixed8[0][0]']
batch_normalization_78 (BatchN (None, 1, 1, 384) 1152 ['conv2d_78[0][0]']
ormalization)
batch_normalization_79 (BatchN (None, 1, 1, 384) 1152 ['conv2d_79[0][0]']
ormalization)
batch_normalization_82 (BatchN (None, 1, 1, 384) 1152 ['conv2d_82[0][0]']
ormalization)
batch_normalization_83 (BatchN (None, 1, 1, 384) 1152 ['conv2d_83[0][0]']
ormalization)
conv2d_84 (Conv2D) (None, 1, 1, 192) 245760 ['average_pooling2d_7[0][0]']
batch_normalization_76 (BatchN (None, 1, 1, 320) 960 ['conv2d_76[0][0]']
ormalization)
activation_78 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_78[0][0]']
activation_79 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_79[0][0]']
activation_82 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_82[0][0]']
activation_83 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_83[0][0]']
batch_normalization_84 (BatchN (None, 1, 1, 192) 576 ['conv2d_84[0][0]']
ormalization)
activation_76 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_76[0][0]']
mixed9_0 (Concatenate) (None, 1, 1, 768) 0 ['activation_78[0][0]',
'activation_79[0][0]']
concatenate (Concatenate) (None, 1, 1, 768) 0 ['activation_82[0][0]',
'activation_83[0][0]']
activation_84 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_84[0][0]']
mixed9 (Concatenate) (None, 1, 1, 2048) 0 ['activation_76[0][0]',
'mixed9_0[0][0]',
'concatenate[0][0]',
'activation_84[0][0]']
conv2d_89 (Conv2D) (None, 1, 1, 448) 917504 ['mixed9[0][0]']
batch_normalization_89 (BatchN (None, 1, 1, 448) 1344 ['conv2d_89[0][0]']
ormalization)
activation_89 (Activation) (None, 1, 1, 448) 0 ['batch_normalization_89[0][0]']
conv2d_86 (Conv2D) (None, 1, 1, 384) 786432 ['mixed9[0][0]']
conv2d_90 (Conv2D) (None, 1, 1, 384) 1548288 ['activation_89[0][0]']
batch_normalization_86 (BatchN (None, 1, 1, 384) 1152 ['conv2d_86[0][0]']
ormalization)
batch_normalization_90 (BatchN (None, 1, 1, 384) 1152 ['conv2d_90[0][0]']
ormalization)
activation_86 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_86[0][0]']
activation_90 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_90[0][0]']
conv2d_87 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_88 (Conv2D) (None, 1, 1, 384) 442368 ['activation_86[0][0]']
conv2d_91 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
conv2d_92 (Conv2D) (None, 1, 1, 384) 442368 ['activation_90[0][0]']
average_pooling2d_8 (AveragePo (None, 1, 1, 2048) 0 ['mixed9[0][0]']
oling2D)
conv2d_85 (Conv2D) (None, 1, 1, 320) 655360 ['mixed9[0][0]']
batch_normalization_87 (BatchN (None, 1, 1, 384) 1152 ['conv2d_87[0][0]']
ormalization)
batch_normalization_88 (BatchN (None, 1, 1, 384) 1152 ['conv2d_88[0][0]']
ormalization)
batch_normalization_91 (BatchN (None, 1, 1, 384) 1152 ['conv2d_91[0][0]']
ormalization)
batch_normalization_92 (BatchN (None, 1, 1, 384) 1152 ['conv2d_92[0][0]']
ormalization)
conv2d_93 (Conv2D) (None, 1, 1, 192) 393216 ['average_pooling2d_8[0][0]']
batch_normalization_85 (BatchN (None, 1, 1, 320) 960 ['conv2d_85[0][0]']
ormalization)
activation_87 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_87[0][0]']
activation_88 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_88[0][0]']
activation_91 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_91[0][0]']
activation_92 (Activation) (None, 1, 1, 384) 0 ['batch_normalization_92[0][0]']
batch_normalization_93 (BatchN (None, 1, 1, 192) 576 ['conv2d_93[0][0]']
ormalization)
activation_85 (Activation) (None, 1, 1, 320) 0 ['batch_normalization_85[0][0]']
mixed9_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_87[0][0]',
'activation_88[0][0]']
concatenate_1 (Concatenate) (None, 1, 1, 768) 0 ['activation_91[0][0]',
'activation_92[0][0]']
activation_93 (Activation) (None, 1, 1, 192) 0 ['batch_normalization_93[0][0]']
mixed10 (Concatenate) (None, 1, 1, 2048) 0 ['activation_85[0][0]',
'mixed9_1[0][0]',
'concatenate_1[0][0]',
'activation_93[0][0]']
flatten (Flatten) (None, 2048) 0 ['mixed10[0][0]']
dense (Dense) (None, 256) 524544 ['flatten[0][0]']
dense_1 (Dense) (None, 128) 32896 ['dense[0][0]']
dropout (Dropout) (None, 128) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 64) 8256 ['dropout[0][0]']
batch_normalization_94 (BatchN (None, 64) 256 ['dense_2[0][0]']
ormalization)
dense_3 (Dense) (None, 2) 130 ['batch_normalization_94[0][0]']
==================================================================================================
Total params: 22,368,866
Trainable params: 22,334,306
Non-trainable params: 34,560
__________________________________________________________________________________________________
Epoch 1/20
312/312 [==============================] - 469s 1s/step - loss: 0.4045 - accuracy: 0.8013 - val_loss: 1.0769 - val_accuracy: 0.8480
Epoch 2/20
312/312 [==============================] - 459s 1s/step - loss: 0.2649 - accuracy: 0.8904 - val_loss: 0.2498 - val_accuracy: 0.8970
Epoch 3/20
312/312 [==============================] - 457s 1s/step - loss: 0.2507 - accuracy: 0.8955 - val_loss: 1.0370 - val_accuracy: 0.8423
Epoch 4/20
312/312 [==============================] - 455s 1s/step - loss: 0.2533 - accuracy: 0.8972 - val_loss: 7.8545 - val_accuracy: 0.6416
Calculating Accuracy:
82/82 [==============================] - 9s 93ms/step - loss: 1.1998 - accuracy: 0.5000
Test Accuracy: 0.5
Classification Report
precision recall f1-score support
0 0.50 1.00 0.67 1300
1 0.00 0.00 0.00 1300
accuracy 0.50 2600
macro avg 0.25 0.50 0.33 2600
weighted avg 0.25 0.50 0.33 2600
Confusion Matrix
Train and Validation Accuracy